Posted to commits@storm.apache.org by ka...@apache.org on 2016/09/12 04:16:06 UTC

[01/10] storm git commit: STORM-1970: external project examples refactor

Repository: storm
Updated Branches:
  refs/heads/1.x-branch bc0a1b8d0 -> 09bff55e0


http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrTopology.java b/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrTopology.java
deleted file mode 100644
index 92d90a3..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrTopology.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.topology;
-
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.solr.config.SolrCommitStrategy;
-import org.apache.storm.solr.config.SolrConfig;
-
-import java.io.IOException;
-
-public abstract class SolrTopology {
-    protected static String COLLECTION = "gettingstarted";
-
-    public void run(String[] args) throws Exception {
-        final StormTopology topology = getTopology();
-        final Config config = getConfig();
-
-        if (args.length == 0) {
-            submitTopologyLocalCluster(topology, config);
-        } else {
-            submitTopologyRemoteCluster(args[0], topology, config);
-        }
-    }
-
-    protected abstract StormTopology getTopology() throws IOException;
-
-    protected void submitTopologyRemoteCluster(String arg, StormTopology topology, Config config) throws Exception {
-        StormSubmitter.submitTopology(arg, config, topology);
-    }
-
-    protected void submitTopologyLocalCluster(StormTopology topology, Config config) throws InterruptedException {
-        LocalCluster cluster = new LocalCluster();
-        cluster.submitTopology("test", config, topology);
-        Thread.sleep(10000);
-        System.out.println("Killing topology per client's request");
-        cluster.killTopology("test");
-        cluster.shutdown();
-        System.exit(0);
-    }
-
-    protected Config getConfig() {
-        Config config = new Config();
-        config.setDebug(true);
-        return config;
-    }
-
-    protected SolrCommitStrategy getSolrCommitStgy() {
-        return null;                          // To Commit to Solr and Ack every tuple
-    }
-
-    protected static SolrConfig getSolrConfig() {
-        String zkHostString = "127.0.0.1:9983";  // zkHostString for Solr gettingstarted example
-        return new SolrConfig(zkHostString);
-    }
-
-    protected static SolrClient getSolrClient() {
-        String zkHostString = "127.0.0.1:9983";  // zkHostString for Solr gettingstarted example
-        return new CloudSolrClient(zkHostString);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java b/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
deleted file mode 100644
index d022c8a..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.trident;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.solr.spout.SolrFieldsSpout;
-import org.apache.storm.solr.topology.SolrFieldsTopology;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-
-import java.io.IOException;
-
-public class SolrFieldsTridentTopology extends SolrFieldsTopology {
-    public static void main(String[] args) throws Exception {
-        SolrFieldsTridentTopology solrFieldsTridentTopology = new SolrFieldsTridentTopology();
-        solrFieldsTridentTopology.run(args);
-    }
-
-    protected StormTopology getTopology() throws IOException {
-        final TridentTopology tridentTopology = new TridentTopology();
-        final SolrFieldsSpout spout = new SolrFieldsSpout();
-        final Stream stream = tridentTopology.newStream("SolrFieldsSpout", spout);
-        final StateFactory solrStateFactory = new SolrStateFactory(getSolrConfig(), getSolrMapper());
-        stream.partitionPersist(solrStateFactory, spout.getOutputFields(),  new SolrUpdater(), new Fields());
-        return tridentTopology.build();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java b/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
deleted file mode 100644
index 75131b8..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.trident;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.solr.spout.SolrJsonSpout;
-import org.apache.storm.solr.topology.SolrJsonTopology;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-
-import java.io.IOException;
-
-public class SolrJsonTridentTopology extends SolrJsonTopology {
-    public static void main(String[] args) throws Exception {
-        SolrJsonTridentTopology solrJsonTridentTopology = new SolrJsonTridentTopology();
-        solrJsonTridentTopology.run(args);
-    }
-
-    protected StormTopology getTopology() throws IOException {
-        final TridentTopology topology = new TridentTopology();
-        final SolrJsonSpout spout = new SolrJsonSpout();
-        final Stream stream = topology.newStream("SolrJsonSpout", spout);
-        final StateFactory solrStateFactory = new SolrStateFactory(getSolrConfig(), getSolrMapper());
-        stream.partitionPersist(solrStateFactory, spout.getOutputFields(),  new SolrUpdater(), new Fields());
-        return topology.build();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/util/TestUtil.java b/external/storm-solr/src/test/java/org/apache/storm/solr/util/TestUtil.java
deleted file mode 100644
index 84a866b..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/util/TestUtil.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.util;
-
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-public class TestUtil {
-    public static String getDate() {
-        DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
-        return df.format(new Date());
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 15871ac..a1466c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -277,6 +277,8 @@
         <module>storm-buildtools/storm-maven-plugins</module>
         <module>storm-core</module>
         <module>storm-rename-hack</module>
+
+        <!-- externals -->
         <module>external/storm-kafka</module>
         <module>external/storm-hdfs</module>
         <module>external/storm-hbase</module>
@@ -292,13 +294,26 @@
         <module>external/storm-cassandra</module>
         <module>external/storm-mqtt</module>
         <module>external/storm-mongodb</module>
-        <module>examples/storm-starter</module>
-        <module>external/storm-kafka-client</module>
-        <module>external/storm-opentsdb</module>
-        <module>external/storm-kafka-monitor</module>
         <module>external/storm-kinesis</module>
         <module>external/storm-druid</module>
         <module>external/storm-submit-tools</module>
+        <module>external/storm-kafka-client</module>
+        <module>external/storm-opentsdb</module>
+        <module>external/storm-kafka-monitor</module>
+
+        <!-- examples -->
+        <module>examples/storm-starter</module>
+        <module>examples/storm-mongodb-examples</module>
+        <module>examples/storm-redis-examples</module>
+        <module>examples/storm-opentsdb-examples</module>
+        <module>examples/storm-solr-examples</module>
+        <module>examples/storm-kafka-examples</module>
+        <module>examples/storm-jdbc-examples</module>
+        <module>examples/storm-hdfs-examples</module>
+        <module>examples/storm-hbase-examples</module>
+        <module>examples/storm-hive-examples</module>
+        <module>examples/storm-elasticsearch-examples</module>
+        <module>examples/storm-mqtt-examples</module>
     </modules>
 
     <dependencies>


[10/10] storm git commit: add STORM-1970 to CHANGELOG

Posted by ka...@apache.org.
add STORM-1970 to CHANGELOG


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/09bff55e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/09bff55e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/09bff55e

Branch: refs/heads/1.x-branch
Commit: 09bff55e0f3271c8766e69754092758045fc7ce2
Parents: a214603
Author: Jungtaek Lim <ka...@gmail.com>
Authored: Mon Sep 12 13:15:31 2016 +0900
Committer: Jungtaek Lim <ka...@gmail.com>
Committed: Mon Sep 12 13:15:31 2016 +0900

----------------------------------------------------------------------
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/09bff55e/CHANGELOG.md
----------------------------------------------------------------------
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0ef2fc0..368a7fe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,5 @@
 ## 1.1.0
+ * STORM-1970: external project examples refactor
  * STORM-2074: fix storm-kafka-monitor NPE bug
  * STORM-1459: Allow not specifying producer properties in read-only Kafka table in StormSQL
  * STORM-2052: Kafka Spout New Client API - Log Improvements and Parameter Tuning for Better Performance.


[04/10] storm git commit: STORM-1970: external project examples refactor

Posted by ka...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
new file mode 100644
index 0000000..8e3390d
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.spout;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Lists;
+import org.apache.storm.solr.util.TestUtil;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+public class SolrFieldsSpout extends BaseRichSpout {
+    private SpoutOutputCollector collector;
+    public static final List<Values> listValues = Lists.newArrayList(
+            getValues("1"), getValues("2"), getValues("3"));
+
+    private static Values getValues(String suf) {
+        String suffix = "_fields_test_val_" + suf;
+        return new Values(
+                "id" + suffix,
+                TestUtil.getDate(),
+                "dc_title" + suffix,
+                "Hugo%Miguel%Louro" + suffix,           // Multivalue field split by non default token %
+                "dynamic_field" + suffix + "_txt",      // to match dynamic fields of the form "*_txt"
+                "non_matching_field" + suffix);         // this field won't be indexed by solr
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    @Override
+    public void nextTuple() {
+        final Random rand = new Random();
+        final Values values = listValues.get(rand.nextInt(listValues.size()));
+        collector.emit(values);
+        Thread.yield();
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(getOutputFields());
+    }
+
+    public Fields getOutputFields() {
+        return new Fields("id","date","dc_title","author","dynamic_field_txt","non_matching_field");
+    }
+
+    @Override
+    public void close() {
+        super.close();
+    }
+}
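
A note on the spout above: collector.emit(values) emits unanchored tuples, so Storm will never call ack() or fail() for them, and a tuple lost downstream is simply gone — acceptable for a demo. A minimal sketch of the anchored variant (hypothetical, not part of this commit; it additionally needs java.util.UUID and java.util.concurrent.ConcurrentHashMap imports) follows the same pattern as the UserDataSpout in the storm-elasticsearch example further down:

    private final ConcurrentHashMap<UUID, Values> pending = new ConcurrentHashMap<>();

    @Override
    public void nextTuple() {
        final Values values = listValues.get(new Random().nextInt(listValues.size()));
        final UUID msgId = UUID.randomUUID();       // anchor the tuple so ack/fail fire for it
        pending.put(msgId, values);
        collector.emit(values, msgId);
    }

    @Override
    public void ack(Object msgId) {
        pending.remove(msgId);                      // fully processed, forget it
    }

    @Override
    public void fail(Object msgId) {
        collector.emit(pending.get(msgId), msgId);  // replay the failed tuple
    }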

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrJsonSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrJsonSpout.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrJsonSpout.java
new file mode 100644
index 0000000..e42155d
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrJsonSpout.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.spout;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Lists;
+import com.google.gson.Gson;
+import org.apache.storm.solr.util.TestUtil;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+public class SolrJsonSpout extends BaseRichSpout {
+    private SpoutOutputCollector collector;
+    private static final List<Values> listValues = Lists.newArrayList(
+            getJsonValues("1"), getJsonValues("2"), getJsonValues("3"), // Tuple contains String Object in JSON format
+            getPojoValues("1"), getPojoValues("2"));    // Tuple contains Java object that must be serialized to JSON by SolrJsonMapper
+
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    @Override
+    public void nextTuple() {
+        final Random rand = new Random();
+        final Values values = listValues.get(rand.nextInt(listValues.size()));
+        collector.emit(values);
+        Thread.yield();
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(getOutputFields());
+    }
+
+    public Fields getOutputFields() {
+        return new Fields("JSON");
+    }
+
+    @Override
+    public void close() {   //TODO
+        super.close();
+    }
+
+    // ====
+
+    private static Values getJsonValues(String suf) {
+        String suffix = "_json_test_val_" + suf;
+        return new Values((new JsonSchema(suffix)).toJson());
+    }
+
+    private static Values getPojoValues(String suf) {
+        String suffix = "_json_test_val_" + suf;
+        return new Values(new JsonSchema(suffix));
+    }
+
+    public static class JsonSchema {
+        private String id;
+        private String date;
+        private String dc_title;
+
+        private static final Gson gson = new Gson();
+
+        public JsonSchema(String suffix) {
+            this.id = "id" + suffix;
+            this.date = TestUtil.getDate();
+            this.dc_title = "dc_title" + suffix;
+        }
+
+        public JsonSchema(String id, String date, String dc_title) {
+            this.id = id;
+            this.date = date;
+            this.dc_title = dc_title;
+        }
+
+        // copy constructor
+        public JsonSchema(JsonSchema jsonSchema) {
+            this.id = jsonSchema.id;
+            this.date = jsonSchema.date;
+            this.dc_title = jsonSchema.dc_title;
+        }
+
+        public String toJson() {
+            String json = gson.toJson(this);
+            System.out.println(json);   // TODO log
+            return json;
+        }
+
+        public static JsonSchema fromJson(String jsonStr) {
+            return new JsonSchema(gson.fromJson(jsonStr, JsonSchema.class));
+        }
+    }
+}
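
A note on the JsonSchema helper above: it serves both as the POJO payload (getPojoValues) and as its own JSON form (getJsonValues), so the spout exercises both input styles the SolrJsonMapper has to handle. An illustrative round trip (not part of this commit):

    JsonSchema original = new JsonSchema("_json_test_val_1");
    String json = original.toJson();              // Gson serialization, e.g. {"id":"id_json_test_val_1",...}
    JsonSchema copy = JsonSchema.fromJson(json);  // Gson parse, then the copy constructor

Also, nextTuple() builds a new Random on every call here and in SolrFieldsSpout; hoisting it to a field, or using java.util.concurrent.ThreadLocalRandom.current(), would be the more idiomatic choice, though it is harmless in a demo.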

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrFieldsTopology.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
new file mode 100644
index 0000000..5c9f16d
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.topology;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.solr.bolt.SolrUpdateBolt;
+import org.apache.storm.solr.config.CountBasedCommit;
+import org.apache.storm.solr.config.SolrCommitStrategy;
+import org.apache.storm.solr.mapper.SolrFieldsMapper;
+import org.apache.storm.solr.mapper.SolrMapper;
+import org.apache.storm.solr.schema.builder.RestJsonSchemaBuilder;
+import org.apache.storm.solr.spout.SolrFieldsSpout;
+
+import java.io.IOException;
+
+public class SolrFieldsTopology extends SolrTopology {
+    public static void main(String[] args) throws Exception {
+        SolrFieldsTopology solrFieldsTopology = new SolrFieldsTopology();
+        solrFieldsTopology.run(args);
+    }
+
+    protected SolrMapper getSolrMapper() throws IOException {
+        return new SolrFieldsMapper.Builder(
+                new RestJsonSchemaBuilder("localhost", "8983", COLLECTION), COLLECTION)
+                    .setMultiValueFieldToken("%").build();
+    }
+
+    protected SolrCommitStrategy getSolrCommitStgy() {
+        return new CountBasedCommit(2);         // To Commit to Solr and Ack according to the commit strategy
+    }
+
+    protected StormTopology getTopology() throws IOException {
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout("SolrFieldsSpout", new SolrFieldsSpout());
+        builder.setBolt("SolrUpdateBolt", new SolrUpdateBolt(getSolrConfig(), getSolrMapper(), getSolrCommitStgy()))
+                .shuffleGrouping("SolrFieldsSpout");
+        return builder.createTopology();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrJsonTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrJsonTopology.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrJsonTopology.java
new file mode 100644
index 0000000..24e6b5e
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrJsonTopology.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.topology;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.solr.bolt.SolrUpdateBolt;
+import org.apache.storm.solr.mapper.SolrJsonMapper;
+import org.apache.storm.solr.mapper.SolrMapper;
+import org.apache.storm.solr.spout.SolrJsonSpout;
+
+import java.io.IOException;
+
+public class SolrJsonTopology extends SolrTopology {
+    public static void main(String[] args) throws Exception {
+        SolrJsonTopology solrJsonTopology = new SolrJsonTopology();
+        solrJsonTopology.run(args);
+    }
+
+    protected SolrMapper getSolrMapper() throws IOException {
+        final String jsonTupleField = "JSON";
+        return new SolrJsonMapper.Builder(COLLECTION, jsonTupleField).build();
+    }
+
+    protected StormTopology getTopology() throws IOException {
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout("SolrJsonSpout", new SolrJsonSpout());
+        builder.setBolt("SolrUpdateBolt", new SolrUpdateBolt(getSolrConfig(), getSolrMapper(), getSolrCommitStgy()))
+                .shuffleGrouping("SolrJsonSpout");
+        return builder.createTopology();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrTopology.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrTopology.java
new file mode 100644
index 0000000..92d90a3
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/topology/SolrTopology.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.topology;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.solr.config.SolrCommitStrategy;
+import org.apache.storm.solr.config.SolrConfig;
+
+import java.io.IOException;
+
+public abstract class SolrTopology {
+    protected static String COLLECTION = "gettingstarted";
+
+    public void run(String[] args) throws Exception {
+        final StormTopology topology = getTopology();
+        final Config config = getConfig();
+
+        if (args.length == 0) {
+            submitTopologyLocalCluster(topology, config);
+        } else {
+            submitTopologyRemoteCluster(args[0], topology, config);
+        }
+    }
+
+    protected abstract StormTopology getTopology() throws IOException;
+
+    protected void submitTopologyRemoteCluster(String arg, StormTopology topology, Config config) throws Exception {
+        StormSubmitter.submitTopology(arg, config, topology);
+    }
+
+    protected void submitTopologyLocalCluster(StormTopology topology, Config config) throws InterruptedException {
+        LocalCluster cluster = new LocalCluster();
+        cluster.submitTopology("test", config, topology);
+        Thread.sleep(10000);
+        System.out.println("Killing topology per client's request");
+        cluster.killTopology("test");
+        cluster.shutdown();
+        System.exit(0);
+    }
+
+    protected Config getConfig() {
+        Config config = new Config();
+        config.setDebug(true);
+        return config;
+    }
+
+    protected SolrCommitStrategy getSolrCommitStgy() {
+        return null;                          // To Commit to Solr and Ack every tuple
+    }
+
+    protected static SolrConfig getSolrConfig() {
+        String zkHostString = "127.0.0.1:9983";  // zkHostString for Solr gettingstarted example
+        return new SolrConfig(zkHostString);
+    }
+
+    protected static SolrClient getSolrClient() {
+        String zkHostString = "127.0.0.1:9983";  // zkHostString for Solr gettingstarted example
+        return new CloudSolrClient(zkHostString);
+    }
+
+}
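
A note on getSolrClient() above: the single-argument CloudSolrClient constructor presumably matches the SolrJ version this commit builds against; later SolrJ releases deprecate it in favor of a builder. Under a newer SolrJ the rough equivalent (a sketch, assuming a 6.x-style builder API) would be:

    CloudSolrClient client = new CloudSolrClient.Builder()
            .withZkHost("127.0.0.1:9983")          // same zkHost as the Solr "gettingstarted" example
            .build();
    client.setDefaultCollection("gettingstarted");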

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
new file mode 100644
index 0000000..d022c8a
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrFieldsTridentTopology.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.trident;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.solr.spout.SolrFieldsSpout;
+import org.apache.storm.solr.topology.SolrFieldsTopology;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+
+import java.io.IOException;
+
+public class SolrFieldsTridentTopology extends SolrFieldsTopology {
+    public static void main(String[] args) throws Exception {
+        SolrFieldsTridentTopology solrFieldsTridentTopology = new SolrFieldsTridentTopology();
+        solrFieldsTridentTopology.run(args);
+    }
+
+    protected StormTopology getTopology() throws IOException {
+        final TridentTopology tridentTopology = new TridentTopology();
+        final SolrFieldsSpout spout = new SolrFieldsSpout();
+        final Stream stream = tridentTopology.newStream("SolrFieldsSpout", spout);
+        final StateFactory solrStateFactory = new SolrStateFactory(getSolrConfig(), getSolrMapper());
+        stream.partitionPersist(solrStateFactory, spout.getOutputFields(),  new SolrUpdater(), new Fields());
+        return tridentTopology.build();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
new file mode 100644
index 0000000..75131b8
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/trident/SolrJsonTridentTopology.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.trident;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.solr.spout.SolrJsonSpout;
+import org.apache.storm.solr.topology.SolrJsonTopology;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+
+import java.io.IOException;
+
+public class SolrJsonTridentTopology extends SolrJsonTopology {
+    public static void main(String[] args) throws Exception {
+        SolrJsonTridentTopology solrJsonTridentTopology = new SolrJsonTridentTopology();
+        solrJsonTridentTopology.run(args);
+    }
+
+    protected StormTopology getTopology() throws IOException {
+        final TridentTopology topology = new TridentTopology();
+        final SolrJsonSpout spout = new SolrJsonSpout();
+        final Stream stream = topology.newStream("SolrJsonSpout", spout);
+        final StateFactory solrStateFactory = new SolrStateFactory(getSolrConfig(), getSolrMapper());
+        stream.partitionPersist(solrStateFactory, spout.getOutputFields(),  new SolrUpdater(), new Fields());
+        return topology.build();
+    }
+}
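
A note on the two Trident topologies: both follow the same shape — newStream(...), then partitionPersist(stateFactory, spout.getOutputFields(), new SolrUpdater(), new Fields()) — where the trailing empty Fields() means the updater emits no new fields. For debugging, one hypothetical tweak (not in this commit) is Trident's built-in Debug filter, which prints each tuple before it reaches Solr:

    import org.apache.storm.trident.operation.builtin.Debug;

    final Stream stream = topology.newStream("SolrJsonSpout", spout)
            .each(spout.getOutputFields(), new Debug("before-solr"));
    stream.partitionPersist(solrStateFactory, spout.getOutputFields(), new SolrUpdater(), new Fields());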

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/util/TestUtil.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/util/TestUtil.java
new file mode 100644
index 0000000..84a866b
--- /dev/null
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/util/TestUtil.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.solr.util;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class TestUtil {
+    public static String getDate() {
+        DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+        return df.format(new Date());
+    }
+}
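
A note on TestUtil.getDate(): the pattern quotes the trailing 'Z', so SimpleDateFormat prints a literal Z while still formatting in the JVM's default time zone — the output only looks like UTC. If UTC is actually intended, a java.time equivalent (a sketch, assuming UTC is the intent) is:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class TestUtil {
        // reusable and thread-safe, unlike SimpleDateFormat; pinned to UTC so the 'Z' is truthful
        private static final DateTimeFormatter FMT =
                DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneOffset.UTC);

        public static String getDate() {
            return FMT.format(Instant.now());
        }
    }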

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java b/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
deleted file mode 100644
index d30424b..0000000
--- a/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.elasticsearch.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.elasticsearch.common.EsConfig;
-import org.apache.storm.elasticsearch.common.EsConstants;
-import org.apache.storm.elasticsearch.common.EsTestUtil;
-import org.apache.storm.elasticsearch.common.EsTupleMapper;
-
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class EsIndexTopology {
-
-    static final String SPOUT_ID = "spout";
-    static final String BOLT_ID = "bolt";
-    static final String TOPOLOGY_NAME = "elasticsearch-test-topology1";
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-        config.setNumWorkers(1);
-        TopologyBuilder builder = new TopologyBuilder();
-        UserDataSpout spout = new UserDataSpout();
-        builder.setSpout(SPOUT_ID, spout, 1);
-        EsTupleMapper tupleMapper = EsTestUtil.generateDefaultTupleMapper();
-        EsConfig esConfig = new EsConfig(EsConstants.clusterName, new String[]{"localhost:9300"});
-        builder.setBolt(BOLT_ID, new EsIndexBolt(esConfig, tupleMapper), 1).shuffleGrouping(SPOUT_ID);
-
-        EsTestUtil.startEsNode();
-        EsTestUtil.waitForSeconds(5);
-
-        LocalCluster cluster = new LocalCluster();
-        cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-        EsTestUtil.waitForSeconds(20);
-        cluster.killTopology(TOPOLOGY_NAME);
-        System.out.println("cluster begin to shutdown");
-        cluster.shutdown();
-        System.out.println("cluster shutdown");
-        System.exit(0);
-    }
-
-    public static class UserDataSpout extends BaseRichSpout {
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String[] sources = {
-                "{\"user\":\"user1\"}",
-                "{\"user\":\"user2\"}",
-                "{\"user\":\"user3\"}",
-                "{\"user\":\"user4\"}"
-        };
-        private int index = 0;
-        private int count = 0;
-        private long total = 0L;
-        private String indexName = "index1";
-        private String typeName = "type1";
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("source", "index", "type", "id"));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-        }
-
-        public void nextTuple() {
-            String source = sources[index];
-            UUID msgId = UUID.randomUUID();
-            Values values = new Values(source, indexName, typeName, msgId);
-            this.pending.put(msgId, values);
-            this.collector.emit(values, msgId);
-            index++;
-            if (index >= sources.length) {
-                index = 0;
-            }
-            count++;
-            total++;
-            if (count > 1000) {
-                count = 0;
-                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-            }
-            Thread.yield();
-        }
-
-        public void ack(Object msgId) {
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java b/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
deleted file mode 100644
index 67eab5b..0000000
--- a/external/storm-elasticsearch/src/test/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.elasticsearch.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.elasticsearch.common.EsConfig;
-import org.apache.storm.elasticsearch.common.EsConstants;
-import org.apache.storm.elasticsearch.common.EsTestUtil;
-import org.apache.storm.elasticsearch.common.EsTupleMapper;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.spout.IBatchSpout;
-import org.apache.storm.trident.state.StateFactory;
-
-import java.util.*;
-
-public class TridentEsTopology {
-
-    static final String TOPOLOGY_NAME = "elasticsearch-test-topology2";
-
-    public static void main(String[] args) {
-        int batchSize = 100;
-        FixedBatchSpout spout = new FixedBatchSpout(batchSize);
-        spout.setCycle(true);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout", spout);
-        EsConfig esConfig = new EsConfig(EsConstants.clusterName, new String[]{"localhost:9300"});
-        Fields esFields = new Fields("index", "type", "source");
-        EsTupleMapper tupleMapper = EsTestUtil.generateDefaultTupleMapper();
-        StateFactory factory = new EsStateFactory(esConfig, tupleMapper);
-        TridentState state = stream.partitionPersist(factory, esFields, new EsUpdater(), new Fields());
-
-        EsTestUtil.startEsNode();
-        EsTestUtil.waitForSeconds(5);
-
-        LocalCluster cluster = new LocalCluster();
-        cluster.submitTopology(TOPOLOGY_NAME, null, topology.build());
-        EsTestUtil.waitForSeconds(20);
-        cluster.killTopology(TOPOLOGY_NAME);
-        System.out.println("cluster begin to shutdown");
-        cluster.shutdown();
-        System.out.println("cluster shutdown");
-        System.exit(0);
-    }
-
-    public static class FixedBatchSpout implements IBatchSpout {
-        int maxBatchSize;
-        HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
-        private Values[] outputs = {
-                new Values("{\"user\":\"user1\"}", "index1", "type1", UUID.randomUUID().toString()),
-                new Values("{\"user\":\"user2\"}", "index1", "type2", UUID.randomUUID().toString()),
-                new Values("{\"user\":\"user3\"}", "index2", "type1", UUID.randomUUID().toString()),
-                new Values("{\"user\":\"user4\"}", "index2", "type2", UUID.randomUUID().toString())
-        };
-        private int index = 0;
-        boolean cycle = false;
-
-        public FixedBatchSpout(int maxBatchSize) {
-            this.maxBatchSize = maxBatchSize;
-        }
-
-        public void setCycle(boolean cycle) {
-            this.cycle = cycle;
-        }
-
-        @Override
-        public Fields getOutputFields() {
-            return new Fields("source", "index", "type", "id");
-        }
-
-        @Override
-        public void open(Map conf, TopologyContext context) {
-            index = 0;
-        }
-
-        @Override
-        public void emitBatch(long batchId, TridentCollector collector) {
-            List<List<Object>> batch = this.batches.get(batchId);
-            if (batch == null) {
-                batch = new ArrayList<List<Object>>();
-                if (index >= outputs.length && cycle) {
-                    index = 0;
-                }
-                for (int i = 0; i < maxBatchSize; index++, i++) {
-                    if (index == outputs.length) {
-                        index = 0;
-                    }
-                    batch.add(outputs[index]);
-                }
-                this.batches.put(batchId, batch);
-            }
-            for (List<Object> list : batch) {
-                collector.emit(list);
-            }
-        }
-
-        @Override
-        public void ack(long batchId) {
-            this.batches.remove(batchId);
-        }
-
-        @Override
-        public void close() {
-        }
-
-        @Override
-        public Map<String, Object> getComponentConfiguration() {
-            Config conf = new Config();
-            conf.setMaxTaskParallelism(1);
-            return conf;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/LookupWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/LookupWordCount.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/LookupWordCount.java
deleted file mode 100644
index 43f72ae..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/LookupWordCount.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.hbase.bolt.HBaseLookupBolt;
-import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
-import org.apache.storm.hbase.bolt.mapper.SimpleHBaseMapper;
-
-import java.util.HashMap;
-import java.util.Map;
-
-
-public class LookupWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
-    private static final String TOTAL_COUNT_BOLT = "TOTAL_COUNT_BOLT";
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        Map<String, Object> hbConf = new HashMap<String, Object>();
-        if(args.length > 0){
-            hbConf.put("hbase.rootdir", args[0]);
-        }
-        config.put("hbase.conf", hbConf);
-
-        WordSpout spout = new WordSpout();
-        TotalWordCounter totalBolt = new TotalWordCounter();
-
-        SimpleHBaseMapper mapper = new SimpleHBaseMapper().withRowKeyField("word");
-        HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
-        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
-
-        WordCountValueMapper rowToTupleMapper = new WordCountValueMapper();
-
-        HBaseLookupBolt hBaseLookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
-                .withConfigKey("hbase.conf")
-                .withProjectionCriteria(projectionCriteria);
-
-        //wordspout -> lookupbolt -> totalCountBolt
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(LOOKUP_BOLT, hBaseLookupBolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(TOTAL_COUNT_BOLT, totalBolt, 1).fieldsGrouping(LOOKUP_BOLT, new Fields("columnName"));
-
-        if (args.length == 1) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 2) {
-            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
-        } else{
-            System.out.println("Usage: LookupWordCount <hbase.rootdir>");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/PersistentWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/PersistentWordCount.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/PersistentWordCount.java
deleted file mode 100644
index cfb94d0..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/PersistentWordCount.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.hbase.bolt.HBaseBolt;
-import org.apache.storm.hbase.bolt.mapper.SimpleHBaseMapper;
-import org.apache.storm.hbase.security.HBaseSecurityUtil;
-
-
-import java.util.HashMap;
-import java.util.Map;
-
-
-public class PersistentWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String COUNT_BOLT = "COUNT_BOLT";
-    private static final String HBASE_BOLT = "HBASE_BOLT";
-
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        Map<String, Object> hbConf = new HashMap<String, Object>();
-        if(args.length > 0){
-            hbConf.put("hbase.rootdir", args[0]);
-        }
-        config.put("hbase.conf", hbConf);
-
-        WordSpout spout = new WordSpout();
-        WordCounter bolt = new WordCounter();
-
-        SimpleHBaseMapper mapper = new SimpleHBaseMapper()
-                .withRowKeyField("word")
-                .withColumnFields(new Fields("word"))
-                .withCounterFields(new Fields("count"))
-                .withColumnFamily("cf");
-
-        HBaseBolt hbase = new HBaseBolt("WordCount", mapper)
-                .withConfigKey("hbase.conf");
-
-
-        // wordSpout ==> countBolt ==> HBaseBolt
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(HBASE_BOLT, hbase, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
-
-
-        if (args.length == 1) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 2) {
-            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
-        } else if (args.length == 4) {
-            System.out.println("hdfs url: " + args[0] + ", keytab file: " + args[2] + 
-                ", principal name: " + args[3] + ", toplogy name: " + args[1]);
-            hbConf.put(HBaseSecurityUtil.STORM_KEYTAB_FILE_KEY, args[2]);
-            hbConf.put(HBaseSecurityUtil.STORM_USER_NAME_KEY, args[3]);
-            config.setNumWorkers(3);
-            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
-        } else {
-            System.out.println("Usage: PersistentWordCount <hbase.rootdir> [topology name] [keytab file] [principal name]");
-        }
-    }
-}
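
The four-argument branch above enables secure (Kerberos) submission by adding
keytab and principal entries to the "hbase.conf" map before submitting. A
minimal standalone sketch of that configuration, with the rootdir URL, keytab
path, and principal as hypothetical placeholders (the config keys come from
HBaseSecurityUtil as in the file above):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.storm.Config;
    import org.apache.storm.hbase.security.HBaseSecurityUtil;

    public class SecureHBaseConfSketch {
        public static void main(String[] args) {
            Map<String, Object> hbConf = new HashMap<String, Object>();
            hbConf.put("hbase.rootdir", "hdfs://namenode:8020/hbase");                // hypothetical URL
            hbConf.put(HBaseSecurityUtil.STORM_KEYTAB_FILE_KEY, "/etc/storm.keytab"); // hypothetical path
            hbConf.put(HBaseSecurityUtil.STORM_USER_NAME_KEY, "storm@EXAMPLE.COM");   // hypothetical principal
            Config config = new Config();
            // HBaseBolt picks this map up via withConfigKey("hbase.conf").
            config.put("hbase.conf", hbConf);
        }
    }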

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/TotalWordCounter.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/TotalWordCounter.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/TotalWordCounter.java
deleted file mode 100644
index 61b0dd8..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/TotalWordCounter.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.IBasicBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.math.BigInteger;
-import java.util.Map;
-import java.util.Random;
-
-import static org.apache.storm.utils.Utils.tuple;
-
-public class TotalWordCounter implements IBasicBolt {
-
-    private BigInteger total = BigInteger.ZERO;
-    private static final Logger LOG = LoggerFactory.getLogger(TotalWordCounter.class);
-    private static final Random RANDOM = new Random();
-    @SuppressWarnings("rawtypes")
-    public void prepare(Map stormConf, TopologyContext context) {
-    }
-
-    /*
-     * Accumulate a running total of the incoming counts and
-     * emit the updated total for every input tuple.
-     */
-    public void execute(Tuple input, BasicOutputCollector collector) {
-        total = total.add(new BigInteger(input.getValues().get(1).toString()));
-        collector.emit(tuple(total.toString()));
-        //prints the total with low probability.
-        if(RANDOM.nextInt(1000) > 995) {
-            LOG.info("Running total = " + total);
-        }
-    }
-
-    public void cleanup() {
-        LOG.info("Final total = " + total);
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("total"));
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountClient.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountClient.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountClient.java
deleted file mode 100644
index 33ce450..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountClient.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Connects to the 'WordCount' table and prints counts for each word.
- *
- * Assumes you have run (or are running) <code>PersistentWordCount</code>
- */
-public class WordCountClient {
-
-    public static void main(String[] args) throws Exception {
-        Configuration config = HBaseConfiguration.create();
-        if(args.length > 0){
-            config.set("hbase.rootdir", args[0]);
-        }
-
-        HTable table = new HTable(config, "WordCount");
-
-
-        for (String word : WordSpout.words) {
-            Get get = new Get(Bytes.toBytes(word));
-            Result result = table.get(get);
-
-            byte[] countBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("count"));
-            byte[] wordBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("word"));
-
-            String wordStr = Bytes.toString(wordBytes);
-            System.out.println(wordStr);
-            long count = Bytes.toLong(countBytes);
-            System.out.println("Word: '" + wordStr + "', Count: " + count);
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountValueMapper.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
deleted file mode 100644
index 6c3301b..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.tuple.Values;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Takes an HBase result and returns a list with one Values instance per column, pairing the column name with its value.
- * So if the result from HBase was
- * <pre>
- * WORD, COUNT
- * apple, 10
- * banana, 20
- * </pre>
- *
- * this will return
- * <pre>
- *     [WORD, apple]
- *     [COUNT, 10]
- *     [WORD, banana]
- *     [COUNT, 20]
- * </pre>
- *
- */
-public class WordCountValueMapper implements HBaseValueMapper {
-
-    @Override
-    public List<Values> toValues(ITuple tuple, Result result) throws Exception {
-        List<Values> values = new ArrayList<Values>();
-        Cell[] cells = result.rawCells();
-        for(Cell cell : cells) {
-            Values value = new Values(Bytes.toString(CellUtil.cloneQualifier(cell)), Bytes.toLong(CellUtil.cloneValue(cell)));
-            values.add(value);
-        }
-        return values;
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("columnName","columnValue"));
-    }
-
-}
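
To make the javadoc example above concrete, here is a minimal, test-style
sketch that feeds the mapper a single-cell Result (the driver class is
hypothetical; KeyValue and Result.create are standard hbase-client API):

    import java.util.Arrays;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.storm.tuple.Values;

    public class WordCountValueMapperSketch {
        public static void main(String[] args) throws Exception {
            // One cell: row "apple", column cf:count, value 10L.
            Cell cell = new KeyValue(Bytes.toBytes("apple"), Bytes.toBytes("cf"),
                    Bytes.toBytes("count"), Bytes.toBytes(10L));
            Result result = Result.create(Arrays.asList(cell));
            // The mapper never reads the input tuple, so null is acceptable here.
            for (Values v : new WordCountValueMapper().toValues(null, result)) {
                System.out.println(v);  // prints [count, 10]
            }
        }
    }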

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCounter.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCounter.java
deleted file mode 100644
index 3a350a8..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordCounter.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.IBasicBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-
-import java.util.Map;
-
-import static org.apache.storm.utils.Utils.tuple;
-
-public class WordCounter implements IBasicBolt {
-
-
-    @SuppressWarnings("rawtypes")
-    public void prepare(Map stormConf, TopologyContext context) {
-    }
-
-    /*
-     * Just output the word value with a count of 1.
-     * The HBaseBolt will handle incrementing the counter.
-     */
-    public void execute(Tuple input, BasicOutputCollector collector) {
-        collector.emit(tuple(input.getValues().get(0), 1));
-    }
-
-    public void cleanup() {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word", "count"));
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordSpout.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordSpout.java
deleted file mode 100644
index c5fc490..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/topology/WordSpout.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.topology;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-
-public class WordSpout implements IRichSpout {
-    boolean isDistributed;
-    SpoutOutputCollector collector;
-    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
-
-    public WordSpout() {
-        this(true);
-    }
-
-    public WordSpout(boolean isDistributed) {
-        this.isDistributed = isDistributed;
-    }
-
-    public boolean isDistributed() {
-        return this.isDistributed;
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    public void close() {
-
-    }
-
-    public void nextTuple() {
-        final Random rand = new Random();
-        final String word = words[rand.nextInt(words.length)];
-        this.collector.emit(new Values(word), UUID.randomUUID());
-        Thread.yield();
-    }
-
-    public void ack(Object msgId) {
-
-    }
-
-    public void fail(Object msgId) {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word"));
-    }
-
-    @Override
-    public void activate() {
-    }
-
-    @Override
-    public void deactivate() {
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/PrintFunction.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/PrintFunction.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/PrintFunction.java
deleted file mode 100644
index cdc7690..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/PrintFunction.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.trident;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.storm.trident.operation.BaseFunction;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.tuple.TridentTuple;
-
-import java.util.Random;
-
-public class PrintFunction extends BaseFunction {
-
-    private static final Logger LOG = LoggerFactory.getLogger(PrintFunction.class);
-
-    private static final Random RANDOM = new Random();
-
-    @Override
-    public void execute(TridentTuple tuple, TridentCollector tridentCollector) {
-        if(RANDOM.nextInt(1000) > 995) {
-            LOG.info(tuple.toString());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/WordCountTrident.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/WordCountTrident.java b/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/WordCountTrident.java
deleted file mode 100644
index b2f0ce8..0000000
--- a/external/storm-hbase/src/test/java/org/apache/storm/hbase/trident/WordCountTrident.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
-import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
-import org.apache.storm.hbase.topology.WordCountValueMapper;
-import org.apache.storm.hbase.trident.mapper.SimpleTridentHBaseMapper;
-import org.apache.storm.hbase.trident.mapper.TridentHBaseMapper;
-import org.apache.storm.hbase.trident.state.HBaseQuery;
-import org.apache.storm.hbase.trident.state.HBaseState;
-import org.apache.storm.hbase.trident.state.HBaseStateFactory;
-import org.apache.storm.hbase.trident.state.HBaseUpdater;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-public class WordCountTrident {
-    public static StormTopology buildTopology(String hbaseRoot){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper()
-                .withColumnFamily("cf")
-                .withColumnFields(new Fields("word"))
-                .withCounterFields(new Fields("count"))
-                .withRowKeyField("word");
-
-        HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();
-
-        HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
-        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
-
-        HBaseState.Options options = new HBaseState.Options()
-                .withConfigKey(hbaseRoot)
-                .withDurability(Durability.SYNC_WAL)
-                .withMapper(tridentHBaseMapper)
-                .withProjectionCriteria(projectionCriteria)
-                .withRowToStormValueMapper(rowToStormValueMapper)
-                .withTableName("WordCount");
-
-        StateFactory factory = new HBaseStateFactory(options);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        stream.partitionPersist(factory, fields,  new HBaseUpdater(), new Fields());
-
-        TridentState state = topology.newStaticState(factory);
-        stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(), new Fields("columnName","columnValue"));
-        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (args.length == 1) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("wordCounter");
-            cluster.shutdown();
-            System.exit(0);
-        }
-        else if(args.length == 2) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology(args[1], conf, buildTopology(args[0]));
-        } else {
-            System.out.println("Usage: WordCountTrident <hbase config key> [topology name]");
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java b/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
deleted file mode 100644
index b1ae542..0000000
--- a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hdfs.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichBolt;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat;
-import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat;
-import org.apache.storm.hdfs.bolt.format.FileNameFormat;
-import org.apache.storm.hdfs.bolt.format.RecordFormat;
-import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
-import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
-import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
-import org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy;
-import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
-import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
-import org.apache.storm.hdfs.common.rotation.MoveFileAction;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class HdfsFileTopology {
-    static final String SENTENCE_SPOUT_ID = "sentence-spout";
-    static final String BOLT_ID = "my-bolt";
-    static final String TOPOLOGY_NAME = "test-topology";
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-        config.setNumWorkers(1);
-
-        SentenceSpout spout = new SentenceSpout();
-
-        // sync the filesystem after every 1k tuples
-        SyncPolicy syncPolicy = new CountSyncPolicy(1000);
-
-        // rotate files once per minute
-        FileRotationPolicy rotationPolicy = new TimedRotationPolicy(1.0f, TimedRotationPolicy.TimeUnit.MINUTES);
-
-        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
-                .withPath("/tmp/foo/")
-                .withExtension(".txt");
-
-        // use "|" instead of "," for field delimiter
-        RecordFormat format = new DelimitedRecordFormat()
-                .withFieldDelimiter("|");
-
-        Yaml yaml = new Yaml();
-        InputStream in = new FileInputStream(args[1]);
-        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
-        in.close();
-        config.put("hdfs.config", yamlConf);
-
-        HdfsBolt bolt = new HdfsBolt()
-                .withConfigKey("hdfs.config")
-                .withFsUrl(args[0])
-                .withFileNameFormat(fileNameFormat)
-                .withRecordFormat(format)
-                .withRotationPolicy(rotationPolicy)
-                .withSyncPolicy(syncPolicy)
-                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest2/"));
-
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(SENTENCE_SPOUT_ID, spout, 1);
-        // SentenceSpout --> MyBolt
-        builder.setBolt(BOLT_ID, bolt, 4)
-                .shuffleGrouping(SENTENCE_SPOUT_ID);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-
-            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-            waitForSeconds(120);
-            cluster.killTopology(TOPOLOGY_NAME);
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else {
-            System.out.println("Usage: HdfsFileTopology <hdfs url> <hdfs yaml config file> [topology name]");
-        }
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-        }
-    }
-
-    public static class SentenceSpout extends BaseRichSpout {
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String[] sentences = {
-                "my dog has fleas",
-                "i like cold beverages",
-                "the dog ate my homework",
-                "don't have a cow man",
-                "i don't think i like fleas"
-        };
-        private int index = 0;
-        private int count = 0;
-        private long total = 0L;
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("sentence", "timestamp"));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-        }
-
-        public void nextTuple() {
-            Values values = new Values(sentences[index], System.currentTimeMillis());
-            UUID msgId = UUID.randomUUID();
-            this.pending.put(msgId, values);
-            this.collector.emit(values, msgId);
-            index++;
-            if (index >= sentences.length) {
-                index = 0;
-            }
-            count++;
-            total++;
-            if(count > 20000){
-                count = 0;
-                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-            }
-            Thread.yield();
-        }
-
-        public void ack(Object msgId) {
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-
-    public static class MyBolt extends BaseRichBolt {
-
-        private HashMap<String, Long> counts = null;
-        private OutputCollector collector;
-
-        public void prepare(Map config, TopologyContext context, OutputCollector collector) {
-            this.counts = new HashMap<String, Long>();
-            this.collector = collector;
-        }
-
-        public void execute(Tuple tuple) {
-            collector.ack(tuple);
-        }
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            // this bolt does not emit anything
-        }
-
-        @Override
-        public void cleanup() {
-        }
-    }
-}
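
Note that HdfsFileTopology rotates on elapsed time, while SequenceFileTopology
(the next file in this diff) rotates on file size. A minimal side-by-side
sketch of the two policies, using only constructors visible in this commit
(the driver class is hypothetical):

    import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
    import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
    import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
    import org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy;

    public class RotationPolicySketch {
        public static void main(String[] args) {
            // Rotate once per minute, as HdfsFileTopology does.
            FileRotationPolicy byTime =
                    new TimedRotationPolicy(1.0f, TimedRotationPolicy.TimeUnit.MINUTES);
            // Rotate when a file reaches 5 MB, as SequenceFileTopology does.
            FileRotationPolicy bySize = new FileSizeRotationPolicy(5.0f, Units.MB);
            System.out.println(byTime.getClass().getSimpleName()
                    + " / " + bySize.getClass().getSimpleName());
        }
    }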

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java b/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
deleted file mode 100644
index 86bc698..0000000
--- a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
+++ /dev/null
@@ -1,202 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hdfs.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichBolt;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.hdfs.bolt.format.*;
-import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
-import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
-import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
-import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
-import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
-import org.apache.storm.hdfs.common.rotation.MoveFileAction;
-
-import org.apache.hadoop.io.SequenceFile;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class SequenceFileTopology {
-    static final String SENTENCE_SPOUT_ID = "sentence-spout";
-    static final String BOLT_ID = "my-bolt";
-    static final String TOPOLOGY_NAME = "test-topology";
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-        config.setNumWorkers(1);
-
-        SentenceSpout spout = new SentenceSpout();
-
-        // sync the filesystem after every 1k tuples
-        SyncPolicy syncPolicy = new CountSyncPolicy(1000);
-
-        // rotate files when they reach 5MB
-        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, Units.MB);
-
-        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
-                .withPath("/tmp/source/")
-                .withExtension(".seq");
-
-        // write the "timestamp" field as the sequence file key and "sentence" as the value
-        DefaultSequenceFormat format = new DefaultSequenceFormat("timestamp", "sentence");
-
-        Yaml yaml = new Yaml();
-        InputStream in = new FileInputStream(args[1]);
-        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
-        in.close();
-        config.put("hdfs.config", yamlConf);
-
-        SequenceFileBolt bolt = new SequenceFileBolt()
-                .withFsUrl(args[0])
-                .withConfigKey("hdfs.config")
-                .withFileNameFormat(fileNameFormat)
-                .withSequenceFormat(format)
-                .withRotationPolicy(rotationPolicy)
-                .withSyncPolicy(syncPolicy)
-                .withCompressionType(SequenceFile.CompressionType.RECORD)
-                .withCompressionCodec("deflate")
-                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest/"));
-
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(SENTENCE_SPOUT_ID, spout, 1);
-        // SentenceSpout --> MyBolt
-        builder.setBolt(BOLT_ID, bolt, 4)
-                .shuffleGrouping(SENTENCE_SPOUT_ID);
-
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-
-            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-            waitForSeconds(120);
-            cluster.killTopology(TOPOLOGY_NAME);
-            cluster.shutdown();
-            System.exit(0);
-        } else if(args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else {
-            System.out.println("Usage: SequenceFileTopology <hdfs url> <hdfs yaml config file> [topology name]");
-        }
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-        }
-    }
-
-
-    public static class SentenceSpout extends BaseRichSpout {
-
-
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String[] sentences = {
-                "my dog has fleas",
-                "i like cold beverages",
-                "the dog ate my homework",
-                "don't have a cow man",
-                "i don't think i like fleas"
-        };
-        private int index = 0;
-        private int count = 0;
-        private long total = 0L;
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("sentence", "timestamp"));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-        }
-
-        public void nextTuple() {
-            Values values = new Values(sentences[index], System.currentTimeMillis());
-            UUID msgId = UUID.randomUUID();
-            this.pending.put(msgId, values);
-            this.collector.emit(values, msgId);
-            index++;
-            if (index >= sentences.length) {
-                index = 0;
-            }
-            count++;
-            total++;
-            if(count > 20000){
-                count = 0;
-                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-            }
-            Thread.yield();
-        }
-
-        public void ack(Object msgId) {
-//            System.out.println("ACK");
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-
-
-    public static class MyBolt extends BaseRichBolt {
-
-        private HashMap<String, Long> counts = null;
-        private OutputCollector collector;
-
-        public void prepare(Map config, TopologyContext context, OutputCollector collector) {
-            this.counts = new HashMap<String, Long>();
-            this.collector = collector;
-        }
-
-        public void execute(Tuple tuple) {
-            collector.ack(tuple);
-        }
-
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            // this bolt does not emit anything
-        }
-
-        @Override
-        public void cleanup() {
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java b/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
deleted file mode 100644
index 76cc2aa..0000000
--- a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hdfs.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.spout.IBatchSpout;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class FixedBatchSpout implements IBatchSpout {
-
-    Fields fields;
-    List<Object>[] outputs;
-    int maxBatchSize;
-    HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
-
-    public FixedBatchSpout(Fields fields, int maxBatchSize, List<Object>... outputs) {
-        this.fields = fields;
-        this.outputs = outputs;
-        this.maxBatchSize = maxBatchSize;
-    }
-
-    int index = 0;
-    boolean cycle = false;
-
-    public void setCycle(boolean cycle) {
-        this.cycle = cycle;
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context) {
-        index = 0;
-    }
-
-    @Override
-    public void emitBatch(long batchId, TridentCollector collector) {
-        List<List<Object>> batch = this.batches.get(batchId);
-        if(batch == null){
-            batch = new ArrayList<List<Object>>();
-            if(index>=outputs.length && cycle) {
-                index = 0;
-            }
-            for(int i=0; i < maxBatchSize; index++, i++) {
-                if(index == outputs.length){
-                    index=0;
-                }
-                batch.add(outputs[index]);
-            }
-            this.batches.put(batchId, batch);
-        }
-        for(List<Object> list : batch){
-            collector.emit(list);
-        }
-    }
-
-    @Override
-    public void ack(long batchId) {
-        this.batches.remove(batchId);
-    }
-
-    @Override
-    public void close() {
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        Config conf = new Config();
-        conf.setMaxTaskParallelism(1);
-        return conf;
-    }
-
-    @Override
-    public Fields getOutputFields() {
-        return fields;
-    }
-}
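
This FixedBatchSpout caches each batch it emits until the batch is acked, so
a failed batch replays with identical contents. A minimal wiring sketch,
assuming it lives alongside the class above (sentences and stream name are
hypothetical):

    import org.apache.storm.trident.TridentTopology;
    import org.apache.storm.tuple.Fields;
    import org.apache.storm.tuple.Values;

    public class FixedBatchSpoutSketch {
        public static void main(String[] args) {
            // Batches of up to three tuples drawn from a fixed set of sentences.
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
                    new Values("the cow jumped over the moon"),
                    new Values("four score and seven years ago"));
            spout.setCycle(true);  // replay the fixed set forever so the stream never starves
            TridentTopology topology = new TridentTopology();
            topology.newStream("sentences", spout);
        }
    }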


[08/10] storm git commit: address review comments

Posted by ka...@apache.org.
address review comments


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/d19816d0
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/d19816d0
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/d19816d0

Branch: refs/heads/1.x-branch
Commit: d19816d0b6522d6f5c185726fd476007aa5345c0
Parents: 97fe209
Author: Xin Wang <be...@163.com>
Authored: Sun Sep 4 22:17:33 2016 +0800
Committer: Jungtaek Lim <ka...@gmail.com>
Committed: Mon Sep 12 13:15:03 2016 +0900

----------------------------------------------------------------------
 examples/storm-elasticsearch-examples/pom.xml | 46 +++++++++++++++++++++-
 examples/storm-hbase-examples/pom.xml         | 46 +++++++++++++++++++++-
 examples/storm-hdfs-examples/pom.xml          | 46 +++++++++++++++++++++-
 examples/storm-hive-examples/pom.xml          | 46 +++++++++++++++++++++-
 examples/storm-jdbc-examples/pom.xml          | 46 +++++++++++++++++++++-
 examples/storm-kafka-examples/pom.xml         | 46 +++++++++++++++++++++-
 examples/storm-mongodb-examples/pom.xml       | 46 +++++++++++++++++++++-
 examples/storm-mqtt-examples/pom.xml          | 12 +++++-
 examples/storm-opentsdb-examples/pom.xml      | 46 +++++++++++++++++++++-
 examples/storm-redis-examples/pom.xml         | 46 +++++++++++++++++++++-
 examples/storm-solr-examples/pom.xml          | 46 +++++++++++++++++++++-
 11 files changed, 451 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-elasticsearch-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/pom.xml b/examples/storm-elasticsearch-examples/pom.xml
index eceb196..3666607 100644
--- a/examples/storm-elasticsearch-examples/pom.xml
+++ b/examples/storm-elasticsearch-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-elasticsearch-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-elasticsearch</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/pom.xml b/examples/storm-hbase-examples/pom.xml
index ac5faaf..cdee608 100644
--- a/examples/storm-hbase-examples/pom.xml
+++ b/examples/storm-hbase-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-hbase-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-hbase</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-hdfs-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/pom.xml b/examples/storm-hdfs-examples/pom.xml
index 0214ce1..690abbc 100644
--- a/examples/storm-hdfs-examples/pom.xml
+++ b/examples/storm-hdfs-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-hdfs-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-hdfs</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-hive-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/pom.xml b/examples/storm-hive-examples/pom.xml
index bb0220a..f9418f7 100644
--- a/examples/storm-hive-examples/pom.xml
+++ b/examples/storm-hive-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-hive-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-hive</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-jdbc-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/pom.xml b/examples/storm-jdbc-examples/pom.xml
index dc90fcd..ce1e08c 100644
--- a/examples/storm-jdbc-examples/pom.xml
+++ b/examples/storm-jdbc-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-jdbc-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-jdbc</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-kafka-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-kafka-examples/pom.xml b/examples/storm-kafka-examples/pom.xml
index 7133ad4..b0ead4f 100644
--- a/examples/storm-kafka-examples/pom.xml
+++ b/examples/storm-kafka-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-kafka-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-kafka</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-mongodb-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/pom.xml b/examples/storm-mongodb-examples/pom.xml
index 6b952f7..275c386 100644
--- a/examples/storm-mongodb-examples/pom.xml
+++ b/examples/storm-mongodb-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-mongodb-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-mongodb</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-mqtt-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/pom.xml b/examples/storm-mqtt-examples/pom.xml
index d6f3a91..dd04cf3 100644
--- a/examples/storm-mqtt-examples/pom.xml
+++ b/examples/storm-mqtt-examples/pom.xml
@@ -32,14 +32,24 @@
 
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <provided.scope>provided</provided.scope>
   </properties>
 
+  <profiles>
+    <profile>
+      <id>intellij</id>
+      <properties>
+        <provided.scope>compile</provided.scope>
+      </properties>
+    </profile>
+  </profiles>
+
   <dependencies>
    <dependency>
       <groupId>org.apache.storm</groupId>
       <artifactId>storm-core</artifactId>
       <version>${project.version}</version>
-      <scope>provided</scope>
+      <scope>${provided.scope}</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.storm</groupId>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-opentsdb-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/pom.xml b/examples/storm-opentsdb-examples/pom.xml
index 057efae..5ffca52 100644
--- a/examples/storm-opentsdb-examples/pom.xml
+++ b/examples/storm-opentsdb-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-opentsdb-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-opentsdb</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-redis-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/pom.xml b/examples/storm-redis-examples/pom.xml
index 91a8660..4192b67 100644
--- a/examples/storm-redis-examples/pom.xml
+++ b/examples/storm-redis-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-redis-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-redis</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/storm/blob/d19816d0/examples/storm-solr-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/pom.xml b/examples/storm-solr-examples/pom.xml
index e64ec5f..7863219 100644
--- a/examples/storm-solr-examples/pom.xml
+++ b/examples/storm-solr-examples/pom.xml
@@ -27,18 +27,60 @@
 
     <artifactId>storm-solr-examples</artifactId>
 
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <provided.scope>provided</provided.scope>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>intellij</id>
+            <properties>
+                <provided.scope>compile</provided.scope>
+            </properties>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-core</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
+            <scope>${provided.scope}</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.storm</groupId>
             <artifactId>storm-solr</artifactId>
             <version>${project.version}</version>
-            <scope>provided</scope>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>
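
Each of these poms wires up the shade plugin's ManifestResourceTransformer without any entries, so the shaded jar gets a merged manifest but no Main-Class. As a hedged sketch of the optional extra step (the transformer accepts a mainClass element; the class named below is one of the topologies this patch adds and is only illustrative, assuming it exposes a main method):

    <transformer
            implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
        <mainClass>org.apache.storm.solr.topology.SolrJsonTopology</mainClass>
    </transformer>

With that in place the shaded jar could be launched with java -jar directly, instead of naming the topology class on the storm jar command line.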


[07/10] storm git commit: STORM-1970: external project examples refactor

Posted by ka...@apache.org.
STORM-1970: external project examples refactor

* conflict resolved by Jungtaek Lim (kabhwan@gmail.com)
  * resolved a version mismatch and covered some external modules added after the original patch


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/97fe209e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/97fe209e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/97fe209e

Branch: refs/heads/1.x-branch
Commit: 97fe209ee6c7d2abe6807732e03baa8950c768ea
Parents: bc0a1b8
Author: vesense <be...@163.com>
Authored: Thu Jul 14 17:13:39 2016 +0800
Committer: Jungtaek Lim <ka...@gmail.com>
Committed: Mon Sep 12 13:09:35 2016 +0900

----------------------------------------------------------------------
 examples/storm-elasticsearch-examples/pom.xml   |  44 ++++
 .../elasticsearch/bolt/EsIndexTopology.java     | 120 +++++++++++
 .../storm/elasticsearch/common/EsConstants.java |  22 ++
 .../storm/elasticsearch/common/EsTestUtil.java  |  75 +++++++
 .../trident/TridentEsTopology.java              | 135 +++++++++++++
 examples/storm-hbase-examples/pom.xml           |  44 ++++
 .../storm/hbase/topology/LookupWordCount.java   |  79 ++++++++
 .../hbase/topology/PersistentWordCount.java     |  91 +++++++++
 .../storm/hbase/topology/TotalWordCounter.java  |  70 +++++++
 .../storm/hbase/topology/WordCountClient.java   |  57 ++++++
 .../hbase/topology/WordCountValueMapper.java    |  70 +++++++
 .../storm/hbase/topology/WordCounter.java       |  59 ++++++
 .../apache/storm/hbase/topology/WordSpout.java  |  88 ++++++++
 .../storm/hbase/trident/PrintFunction.java      |  40 ++++
 .../storm/hbase/trident/WordCountTrident.java   | 104 ++++++++++
 examples/storm-hdfs-examples/pom.xml            |  44 ++++
 .../storm/hdfs/bolt/HdfsFileTopology.java       | 196 ++++++++++++++++++
 .../storm/hdfs/bolt/SequenceFileTopology.java   | 202 +++++++++++++++++++
 .../storm/hdfs/trident/FixedBatchSpout.java     |  97 +++++++++
 .../storm/hdfs/trident/TridentFileTopology.java |  99 +++++++++
 .../hdfs/trident/TridentSequenceTopology.java   |  96 +++++++++
 examples/storm-hive-examples/pom.xml            |  44 ++++
 .../storm/hive/bolt/BucketTestHiveTopology.java | 189 +++++++++++++++++
 .../apache/storm/hive/bolt/HiveTopology.java    | 151 ++++++++++++++
 .../hive/bolt/HiveTopologyPartitioned.java      | 153 ++++++++++++++
 .../storm/hive/trident/TridentHiveTopology.java | 199 ++++++++++++++++++
 examples/storm-jdbc-examples/pom.xml            |  44 ++++
 .../org/apache/storm/jdbc/spout/UserSpout.java  |  90 +++++++++
 .../jdbc/topology/AbstractUserTopology.java     | 115 +++++++++++
 .../jdbc/topology/UserPersistanceTopology.java  |  62 ++++++
 .../UserPersistanceTridentTopology.java         |  61 ++++++
 examples/storm-kafka-examples/pom.xml           |  44 ++++
 .../storm/kafka/TridentKafkaTopology.java       |  91 +++++++++
 examples/storm-mongodb-examples/pom.xml         |  44 ++++
 .../storm/mongodb/topology/InsertWordCount.java |  81 ++++++++
 .../storm/mongodb/topology/UpdateWordCount.java |  91 +++++++++
 .../storm/mongodb/topology/WordCounter.java     |  67 ++++++
 .../storm/mongodb/topology/WordSpout.java       |  88 ++++++++
 .../storm/mongodb/trident/WordCountTrident.java |  85 ++++++++
 examples/storm-mqtt-examples/pom.xml            | 115 +++++++++++
 .../src/main/flux/sample.yaml                   |  62 ++++++
 .../src/main/flux/ssl-sample.yaml               |  78 +++++++
 .../mqtt/examples/CustomMessageMapper.java      |  49 +++++
 .../mqtt/examples/MqttBrokerPublisher.java      | 102 ++++++++++
 .../src/main/resources/log4j2.xml               |  32 +++
 examples/storm-opentsdb-examples/pom.xml        |  44 ++++
 .../storm/opentsdb/MetricGenBatchSpout.java     |  94 +++++++++
 .../apache/storm/opentsdb/MetricGenSpout.java   |  72 +++++++
 .../opentsdb/SampleOpenTsdbBoltTopology.java    |  70 +++++++
 .../opentsdb/SampleOpenTsdbTridentTopology.java |  87 ++++++++
 examples/storm-redis-examples/pom.xml           |  44 ++++
 .../storm/redis/topology/LookupWordCount.java   | 166 +++++++++++++++
 .../redis/topology/PersistentWordCount.java     | 116 +++++++++++
 .../redis/topology/WhitelistWordCount.java      | 155 ++++++++++++++
 .../storm/redis/topology/WordCounter.java       |  67 ++++++
 .../apache/storm/redis/topology/WordSpout.java  |  88 ++++++++
 .../storm/redis/trident/PrintFunction.java      |  40 ++++
 .../redis/trident/WordCountLookupMapper.java    |  57 ++++++
 .../redis/trident/WordCountStoreMapper.java     |  39 ++++
 .../redis/trident/WordCountTridentRedis.java    |  98 +++++++++
 .../trident/WordCountTridentRedisCluster.java   | 106 ++++++++++
 .../WordCountTridentRedisClusterMap.java        | 101 ++++++++++
 .../redis/trident/WordCountTridentRedisMap.java |  94 +++++++++
 examples/storm-solr-examples/pom.xml            |  44 ++++
 .../storm/solr/spout/SolrFieldsSpout.java       |  76 +++++++
 .../apache/storm/solr/spout/SolrJsonSpout.java  | 116 +++++++++++
 .../storm/solr/topology/SolrFieldsTopology.java |  56 +++++
 .../storm/solr/topology/SolrJsonTopology.java   |  48 +++++
 .../storm/solr/topology/SolrTopology.java       |  82 ++++++++
 .../solr/trident/SolrFieldsTridentTopology.java |  45 +++++
 .../solr/trident/SolrJsonTridentTopology.java   |  45 +++++
 .../org/apache/storm/solr/util/TestUtil.java    |  30 +++
 .../elasticsearch/bolt/EsIndexTopology.java     | 120 -----------
 .../trident/TridentEsTopology.java              | 135 -------------
 .../storm/hbase/topology/LookupWordCount.java   |  79 --------
 .../hbase/topology/PersistentWordCount.java     |  91 ---------
 .../storm/hbase/topology/TotalWordCounter.java  |  70 -------
 .../storm/hbase/topology/WordCountClient.java   |  57 ------
 .../hbase/topology/WordCountValueMapper.java    |  70 -------
 .../storm/hbase/topology/WordCounter.java       |  59 ------
 .../apache/storm/hbase/topology/WordSpout.java  |  88 --------
 .../storm/hbase/trident/PrintFunction.java      |  40 ----
 .../storm/hbase/trident/WordCountTrident.java   | 104 ----------
 .../storm/hdfs/bolt/HdfsFileTopology.java       | 196 ------------------
 .../storm/hdfs/bolt/SequenceFileTopology.java   | 202 -------------------
 .../storm/hdfs/trident/FixedBatchSpout.java     |  97 ---------
 .../storm/hdfs/trident/TridentFileTopology.java |  99 ---------
 .../hdfs/trident/TridentSequenceTopology.java   |  96 ---------
 .../storm/hive/bolt/BucketTestHiveTopology.java | 190 -----------------
 .../apache/storm/hive/bolt/HiveTopology.java    | 151 --------------
 .../hive/bolt/HiveTopologyPartitioned.java      | 153 --------------
 .../storm/hive/trident/TridentHiveTopology.java | 199 ------------------
 .../org/apache/storm/jdbc/spout/UserSpout.java  |  90 ---------
 .../jdbc/topology/AbstractUserTopology.java     | 115 -----------
 .../jdbc/topology/UserPersistanceTopology.java  |  62 ------
 .../UserPersistanceTridentTopology.java         |  61 ------
 .../storm/kafka/TridentKafkaTopology.java       |  91 ---------
 .../storm/mongodb/topology/InsertWordCount.java |  81 --------
 .../storm/mongodb/topology/UpdateWordCount.java |  91 ---------
 .../storm/mongodb/topology/WordCounter.java     |  67 ------
 .../storm/mongodb/topology/WordSpout.java       |  88 --------
 .../storm/mongodb/trident/WordCountTrident.java |  85 --------
 external/storm-mqtt/examples/pom.xml            | 115 -----------
 .../examples/src/main/flux/sample.yaml          |  62 ------
 .../examples/src/main/flux/ssl-sample.yaml      |  78 -------
 .../mqtt/examples/CustomMessageMapper.java      |  49 -----
 .../mqtt/examples/MqttBrokerPublisher.java      | 102 ----------
 .../examples/src/main/resources/log4j2.xml      |  32 ---
 external/storm-mqtt/pom.xml                     |   1 -
 .../storm/opentsdb/MetricGenBatchSpout.java     |  94 ---------
 .../apache/storm/opentsdb/MetricGenSpout.java   |  72 -------
 .../opentsdb/SampleOpenTsdbBoltTopology.java    |  70 -------
 .../opentsdb/SampleOpenTsdbTridentTopology.java |  87 --------
 .../storm/redis/topology/LookupWordCount.java   | 166 ---------------
 .../redis/topology/PersistentWordCount.java     | 116 -----------
 .../redis/topology/WhitelistWordCount.java      | 155 --------------
 .../storm/redis/topology/WordCounter.java       |  67 ------
 .../apache/storm/redis/topology/WordSpout.java  |  88 --------
 .../storm/redis/trident/PrintFunction.java      |  40 ----
 .../redis/trident/WordCountLookupMapper.java    |  57 ------
 .../redis/trident/WordCountStoreMapper.java     |  39 ----
 .../redis/trident/WordCountTridentRedis.java    |  98 ---------
 .../trident/WordCountTridentRedisCluster.java   | 106 ----------
 .../WordCountTridentRedisClusterMap.java        | 101 ----------
 .../redis/trident/WordCountTridentRedisMap.java |  94 ---------
 .../storm/solr/spout/SolrFieldsSpout.java       |  76 -------
 .../apache/storm/solr/spout/SolrJsonSpout.java  | 120 -----------
 .../storm/solr/topology/SolrFieldsTopology.java |  56 -----
 .../storm/solr/topology/SolrJsonTopology.java   |  48 -----
 .../storm/solr/topology/SolrTopology.java       |  82 --------
 .../solr/trident/SolrFieldsTridentTopology.java |  45 -----
 .../solr/trident/SolrJsonTridentTopology.java   |  45 -----
 .../org/apache/storm/solr/util/TestUtil.java    |  30 ---
 pom.xml                                         |  23 ++-
 134 files changed, 6068 insertions(+), 5522 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-elasticsearch-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/pom.xml b/examples/storm-elasticsearch-examples/pom.xml
new file mode 100644
index 0000000..eceb196
--- /dev/null
+++ b/examples/storm-elasticsearch-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-elasticsearch-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-elasticsearch</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
new file mode 100644
index 0000000..d30424b
--- /dev/null
+++ b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/bolt/EsIndexTopology.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.elasticsearch.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.elasticsearch.common.EsConfig;
+import org.apache.storm.elasticsearch.common.EsConstants;
+import org.apache.storm.elasticsearch.common.EsTestUtil;
+import org.apache.storm.elasticsearch.common.EsTupleMapper;
+
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class EsIndexTopology {
+
+    static final String SPOUT_ID = "spout";
+    static final String BOLT_ID = "bolt";
+    static final String TOPOLOGY_NAME = "elasticsearch-test-topology1";
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+        config.setNumWorkers(1);
+        TopologyBuilder builder = new TopologyBuilder();
+        UserDataSpout spout = new UserDataSpout();
+        builder.setSpout(SPOUT_ID, spout, 1);
+        EsTupleMapper tupleMapper = EsTestUtil.generateDefaultTupleMapper();
+        EsConfig esConfig = new EsConfig(EsConstants.clusterName, new String[]{"localhost:9300"});
+        builder.setBolt(BOLT_ID, new EsIndexBolt(esConfig, tupleMapper), 1).shuffleGrouping(SPOUT_ID);
+
+        EsTestUtil.startEsNode();
+        EsTestUtil.waitForSeconds(5);
+
+        LocalCluster cluster = new LocalCluster();
+        cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+        EsTestUtil.waitForSeconds(20);
+        cluster.killTopology(TOPOLOGY_NAME);
+        System.out.println("cluster begin to shutdown");
+        cluster.shutdown();
+        System.out.println("cluster shutdown");
+        System.exit(0);
+    }
+
+    public static class UserDataSpout extends BaseRichSpout {
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String[] sources = {
+                "{\"user\":\"user1\"}",
+                "{\"user\":\"user2\"}",
+                "{\"user\":\"user3\"}",
+                "{\"user\":\"user4\"}"
+        };
+        private int index = 0;
+        private int count = 0;
+        private long total = 0L;
+        private String indexName = "index1";
+        private String typeName = "type1";
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("source", "index", "type", "id"));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+        }
+
+        public void nextTuple() {
+            String source = sources[index];
+            UUID msgId = UUID.randomUUID();
+            Values values = new Values(source, indexName, typeName, msgId);
+            this.pending.put(msgId, values);
+            this.collector.emit(values, msgId);
+            index++;
+            if (index >= sources.length) {
+                index = 0;
+            }
+            count++;
+            total++;
+            if (count > 1000) {
+                count = 0;
+                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+            }
+            Thread.yield();
+        }
+
+        public void ack(Object msgId) {
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsConstants.java
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsConstants.java b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsConstants.java
new file mode 100644
index 0000000..98bb71d
--- /dev/null
+++ b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsConstants.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.elasticsearch.common;
+
+public class EsConstants {
+    public static String clusterName = "test-cluster";
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsTestUtil.java
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsTestUtil.java b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsTestUtil.java
new file mode 100644
index 0000000..cb1c745
--- /dev/null
+++ b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/common/EsTestUtil.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.elasticsearch.common;
+
+import org.apache.storm.Config;
+import org.apache.storm.task.GeneralTopologyContext;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.TupleImpl;
+import org.apache.storm.tuple.Values;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.NodeBuilder;
+
+import java.util.HashMap;
+
+public class EsTestUtil {
+    public static Tuple generateTestTuple(String source, String index, String type, String id) {
+        TopologyBuilder builder = new TopologyBuilder();
+        GeneralTopologyContext topologyContext = new GeneralTopologyContext(builder.createTopology(),
+                new Config(), new HashMap(), new HashMap(), new HashMap(), "") {
+            @Override
+            public Fields getComponentOutputFields(String componentId, String streamId) {
+                return new Fields("source", "index", "type", "id");
+            }
+        };
+        return new TupleImpl(topologyContext, new Values(source, index, type, id), 1, "");
+    }
+
+    public static EsTupleMapper generateDefaultTupleMapper() {
+        return new DefaultEsTupleMapper();
+    }
+
+    public static Node startEsNode(){
+        Node node = NodeBuilder.nodeBuilder().data(true).settings(
+                ImmutableSettings.builder()
+                        .put(ClusterName.SETTING, EsConstants.clusterName)
+                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                        .put(EsExecutors.PROCESSORS, 1)
+                        .put("http.enabled", false)
+                        .put("index.percolator.map_unmapped_fields_as_string", true)
+                        .put("index.store.type", "memory")
+        ).build();
+        node.start();
+        return node;
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            // Restore the interrupt flag rather than swallowing the exception silently.
+            Thread.currentThread().interrupt();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
new file mode 100644
index 0000000..67eab5b
--- /dev/null
+++ b/examples/storm-elasticsearch-examples/src/main/java/org/apache/storm/elasticsearch/trident/TridentEsTopology.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.elasticsearch.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.elasticsearch.common.EsConfig;
+import org.apache.storm.elasticsearch.common.EsConstants;
+import org.apache.storm.elasticsearch.common.EsTestUtil;
+import org.apache.storm.elasticsearch.common.EsTupleMapper;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.spout.IBatchSpout;
+import org.apache.storm.trident.state.StateFactory;
+
+import java.util.*;
+
+public class TridentEsTopology {
+
+    static final String TOPOLOGY_NAME = "elasticsearch-test-topology2";
+
+    public static void main(String[] args) {
+        int batchSize = 100;
+        FixedBatchSpout spout = new FixedBatchSpout(batchSize);
+        spout.setCycle(true);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout", spout);
+        EsConfig esConfig = new EsConfig(EsConstants.clusterName, new String[]{"localhost:9300"});
+        Fields esFields = new Fields("index", "type", "source");
+        EsTupleMapper tupleMapper = EsTestUtil.generateDefaultTupleMapper();
+        StateFactory factory = new EsStateFactory(esConfig, tupleMapper);
+        TridentState state = stream.partitionPersist(factory, esFields, new EsUpdater(), new Fields());
+
+        EsTestUtil.startEsNode();
+        EsTestUtil.waitForSeconds(5);
+
+        LocalCluster cluster = new LocalCluster();
+        cluster.submitTopology(TOPOLOGY_NAME, null, topology.build());
+        EsTestUtil.waitForSeconds(20);
+        cluster.killTopology(TOPOLOGY_NAME);
+        System.out.println("cluster begin to shutdown");
+        cluster.shutdown();
+        System.out.println("cluster shutdown");
+        System.exit(0);
+    }
+
+    public static class FixedBatchSpout implements IBatchSpout {
+        int maxBatchSize;
+        HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
+        private Values[] outputs = {
+                new Values("{\"user\":\"user1\"}", "index1", "type1", UUID.randomUUID().toString()),
+                new Values("{\"user\":\"user2\"}", "index1", "type2", UUID.randomUUID().toString()),
+                new Values("{\"user\":\"user3\"}", "index2", "type1", UUID.randomUUID().toString()),
+                new Values("{\"user\":\"user4\"}", "index2", "type2", UUID.randomUUID().toString())
+        };
+        private int index = 0;
+        boolean cycle = false;
+
+        public FixedBatchSpout(int maxBatchSize) {
+            this.maxBatchSize = maxBatchSize;
+        }
+
+        public void setCycle(boolean cycle) {
+            this.cycle = cycle;
+        }
+
+        @Override
+        public Fields getOutputFields() {
+            return new Fields("source", "index", "type", "id");
+        }
+
+        @Override
+        public void open(Map conf, TopologyContext context) {
+            index = 0;
+        }
+
+        @Override
+        public void emitBatch(long batchId, TridentCollector collector) {
+            List<List<Object>> batch = this.batches.get(batchId);
+            if (batch == null) {
+                batch = new ArrayList<List<Object>>();
+                if (index >= outputs.length && cycle) {
+                    index = 0;
+                }
+                for (int i = 0; i < maxBatchSize; index++, i++) {
+                    if (index == outputs.length) {
+                        index = 0;
+                    }
+                    batch.add(outputs[index]);
+                }
+                this.batches.put(batchId, batch);
+            }
+            for (List<Object> list : batch) {
+                collector.emit(list);
+            }
+        }
+
+        @Override
+        public void ack(long batchId) {
+            this.batches.remove(batchId);
+        }
+
+        @Override
+        public void close() {
+        }
+
+        @Override
+        public Map<String, Object> getComponentConfiguration() {
+            Config conf = new Config();
+            conf.setMaxTaskParallelism(1);
+            return conf;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/pom.xml b/examples/storm-hbase-examples/pom.xml
new file mode 100644
index 0000000..ac5faaf
--- /dev/null
+++ b/examples/storm-hbase-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-hbase-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-hbase</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
new file mode 100644
index 0000000..43f72ae
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.hbase.bolt.HBaseLookupBolt;
+import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
+import org.apache.storm.hbase.bolt.mapper.SimpleHBaseMapper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+
+public class LookupWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
+    private static final String TOTAL_COUNT_BOLT = "TOTAL_COUNT_BOLT";
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        Map<String, Object> hbConf = new HashMap<String, Object>();
+        if(args.length > 0){
+            hbConf.put("hbase.rootdir", args[0]);
+        }
+        config.put("hbase.conf", hbConf);
+
+        WordSpout spout = new WordSpout();
+        TotalWordCounter totalBolt = new TotalWordCounter();
+
+        SimpleHBaseMapper mapper = new SimpleHBaseMapper().withRowKeyField("word");
+        HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
+        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
+
+        WordCountValueMapper rowToTupleMapper = new WordCountValueMapper();
+
+        HBaseLookupBolt hBaseLookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
+                .withConfigKey("hbase.conf")
+                .withProjectionCriteria(projectionCriteria);
+
+        //wordspout -> lookupbolt -> totalCountBolt
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(LOOKUP_BOLT, hBaseLookupBolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(TOTAL_COUNT_BOLT, totalBolt, 1).fieldsGrouping(LOOKUP_BOLT, new Fields("columnName"));
+
+        if (args.length == 1) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 2) {
+            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
+        } else{
+            System.out.println("Usage: LookupWordCount <hbase.rootdir>");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
new file mode 100644
index 0000000..cfb94d0
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.hbase.bolt.HBaseBolt;
+import org.apache.storm.hbase.bolt.mapper.SimpleHBaseMapper;
+import org.apache.storm.hbase.security.HBaseSecurityUtil;
+
+
+import java.util.HashMap;
+import java.util.Map;
+
+
+public class PersistentWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String COUNT_BOLT = "COUNT_BOLT";
+    private static final String HBASE_BOLT = "HBASE_BOLT";
+
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        Map<String, Object> hbConf = new HashMap<String, Object>();
+        if(args.length > 0){
+            hbConf.put("hbase.rootdir", args[0]);
+        }
+        config.put("hbase.conf", hbConf);
+
+        WordSpout spout = new WordSpout();
+        WordCounter bolt = new WordCounter();
+
+        SimpleHBaseMapper mapper = new SimpleHBaseMapper()
+                .withRowKeyField("word")
+                .withColumnFields(new Fields("word"))
+                .withCounterFields(new Fields("count"))
+                .withColumnFamily("cf");
+
+        HBaseBolt hbase = new HBaseBolt("WordCount", mapper)
+                .withConfigKey("hbase.conf");
+
+
+        // wordSpout ==> countBolt ==> HBaseBolt
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(HBASE_BOLT, hbase, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
+
+
+        if (args.length == 1) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 2) {
+            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
+        } else if (args.length == 4) {
+            System.out.println("hdfs url: " + args[0] + ", keytab file: " + args[2] + 
+                ", principal name: " + args[3] + ", toplogy name: " + args[1]);
+            hbConf.put(HBaseSecurityUtil.STORM_KEYTAB_FILE_KEY, args[2]);
+            hbConf.put(HBaseSecurityUtil.STORM_USER_NAME_KEY, args[3]);
+            config.setNumWorkers(3);
+            StormSubmitter.submitTopology(args[1], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: PersistentWordCount <hbase.rootdir> [topology name] [keytab file] [principal name]");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
new file mode 100644
index 0000000..61b0dd8
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.IBasicBolt;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.math.BigInteger;
+import java.util.Map;
+import java.util.Random;
+
+import static org.apache.storm.utils.Utils.tuple;
+
+public class TotalWordCounter implements IBasicBolt {
+
+    private BigInteger total = BigInteger.ZERO;
+    private static final Logger LOG = LoggerFactory.getLogger(TotalWordCounter.class);
+    private static final Random RANDOM = new Random();
+    @SuppressWarnings("rawtypes")
+    public void prepare(Map stormConf, TopologyContext context) {
+    }
+
+    /*
+     * Adds the incoming count to a running total and emits the new total.
+     */
+    public void execute(Tuple input, BasicOutputCollector collector) {
+        total = total.add(new BigInteger(input.getValues().get(1).toString()));
+        collector.emit(tuple(total.toString()));
+        // log the running total occasionally (roughly 0.4% of tuples)
+        if(RANDOM.nextInt(1000) > 995) {
+            LOG.info("Running total = " + total);
+        }
+    }
+
+    public void cleanup() {
+        LOG.info("Final total = " + total);
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("total"));
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+}
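
The bolt above uses BigInteger rather than a primitive long, which keeps the
running total overflow-safe however long the topology runs. A self-contained
sketch of the same accumulation pattern:

    BigInteger total = BigInteger.ZERO;
    for (long count : new long[] {10L, 20L, 30L}) {
        total = total.add(BigInteger.valueOf(count));
    }
    System.out.println("Running total = " + total); // prints 60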

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
new file mode 100644
index 0000000..33ce450
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Connects to the 'WordCount' table and prints counts for each word.
+ *
+ * Assumes you have run (or are running) <code>PersistentWordCount</code>
+ */
+public class WordCountClient {
+
+    public static void main(String[] args) throws Exception {
+        Configuration config = HBaseConfiguration.create();
+        if(args.length > 0){
+            config.set("hbase.rootdir", args[0]);
+        }
+
+        HTable table = new HTable(config, "WordCount");
+
+
+        for (String word : WordSpout.words) {
+            Get get = new Get(Bytes.toBytes(word));
+            Result result = table.get(get);
+
+            byte[] countBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("count"));
+            byte[] wordBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("word"));
+
+            String wordStr = Bytes.toString(wordBytes);
+            long count = Bytes.toLong(countBytes);
+            System.out.println("Word: '" + wordStr + "', Count: " + count);
+        }
+
+    }
+}
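
A hedged alternative to the per-word Get calls above: scan the whole table,
which also works when the word set is not known in advance. Scan and
ResultScanner are standard HBase client APIs of this era; the sketch assumes
every row carries a cf:count cell:

    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));
    ResultScanner scanner = table.getScanner(scan);
    try {
        for (Result r : scanner) {
            long count = Bytes.toLong(r.getValue(Bytes.toBytes("cf"), Bytes.toBytes("count")));
            System.out.println(Bytes.toString(r.getRow()) + ": " + count);
        }
    } finally {
        scanner.close();
    }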

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
new file mode 100644
index 0000000..6c3301b
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.tuple.Values;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Takes an HBase Result and returns a list of Values containing one instance per column name and its value.
+ * So if the result from HBase was
+ * <pre>
+ * WORD, COUNT
+ * apple, 10
+ * banana, 20
+ * </pre>
+ *
+ * this will return
+ * <pre>
+ *     [WORD, apple]
+ *     [COUNT, 10]
+ *     [WORD, banana]
+ *     [COUNT, 20]
+ * </pre>
+ *
+ */
+public class WordCountValueMapper implements HBaseValueMapper {
+
+    @Override
+    public List<Values> toValues(ITuple tuple, Result result) throws Exception {
+        List<Values> values = new ArrayList<Values>();
+        Cell[] cells = result.rawCells();
+        for(Cell cell : cells) {
+            // assumes every projected cell holds an 8-byte counter value
+            // (WordCountTrident projects only cf:count, so this holds there)
+            Values value = new Values(Bytes.toString(CellUtil.cloneQualifier(cell)), Bytes.toLong(CellUtil.cloneValue(cell)));
+            values.add(value);
+        }
+        return values;
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("columnName","columnValue"));
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
new file mode 100644
index 0000000..3a350a8
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.IBasicBolt;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+
+import java.util.Map;
+
+import static org.apache.storm.utils.Utils.tuple;
+
+public class WordCounter implements IBasicBolt {
+
+
+    @SuppressWarnings("rawtypes")
+    public void prepare(Map stormConf, TopologyContext context) {
+    }
+
+    /*
+     * Just output the word value with a count of 1.
+     * The HBaseBolt will handle incrementing the counter.
+     */
+    public void execute(Tuple input, BasicOutputCollector collector) {
+        collector.emit(tuple(input.getValues().get(0), 1));
+    }
+
+    public void cleanup() {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word", "count"));
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordSpout.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordSpout.java
new file mode 100644
index 0000000..c5fc490
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordSpout.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.topology;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.IRichSpout;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+
+public class WordSpout implements IRichSpout {
+    boolean isDistributed;
+    SpoutOutputCollector collector;
+    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
+
+    public WordSpout() {
+        this(true);
+    }
+
+    public WordSpout(boolean isDistributed) {
+        this.isDistributed = isDistributed;
+    }
+
+    public boolean isDistributed() {
+        return this.isDistributed;
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    public void close() {
+
+    }
+
+    public void nextTuple() {
+        // a new Random per call works but is wasteful; a field would be more idiomatic
+        final Random rand = new Random();
+        final String word = words[rand.nextInt(words.length)];
+        this.collector.emit(new Values(word), UUID.randomUUID());
+        Thread.yield();
+    }
+
+    public void ack(Object msgId) {
+        // no-op: this spout keeps no pending state, so acked tuples need no bookkeeping
+    }
+
+    public void fail(Object msgId) {
+        // no-op: failed tuples are dropped rather than replayed
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word"));
+    }
+
+    @Override
+    public void activate() {
+    }
+
+    @Override
+    public void deactivate() {
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/PrintFunction.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/PrintFunction.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/PrintFunction.java
new file mode 100644
index 0000000..cdc7690
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/PrintFunction.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.trident;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.storm.trident.operation.BaseFunction;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.tuple.TridentTuple;
+
+import java.util.Random;
+
+public class PrintFunction extends BaseFunction {
+
+    private static final Logger LOG = LoggerFactory.getLogger(PrintFunction.class);
+
+    private static final Random RANDOM = new Random();
+
+    @Override
+    public void execute(TridentTuple tuple, TridentCollector tridentCollector) {
+        // log a small random sample of tuples (roughly 0.4%)
+        if(RANDOM.nextInt(1000) > 995) {
+            LOG.info(tuple.toString());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/WordCountTrident.java
----------------------------------------------------------------------
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/WordCountTrident.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/WordCountTrident.java
new file mode 100644
index 0000000..b2f0ce8
--- /dev/null
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/trident/WordCountTrident.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hbase.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
+import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
+import org.apache.storm.hbase.topology.WordCountValueMapper;
+import org.apache.storm.hbase.trident.mapper.SimpleTridentHBaseMapper;
+import org.apache.storm.hbase.trident.mapper.TridentHBaseMapper;
+import org.apache.storm.hbase.trident.state.HBaseQuery;
+import org.apache.storm.hbase.trident.state.HBaseState;
+import org.apache.storm.hbase.trident.state.HBaseStateFactory;
+import org.apache.storm.hbase.trident.state.HBaseUpdater;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+public class WordCountTrident {
+    public static StormTopology buildTopology(String hbaseRoot){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper()
+                .withColumnFamily("cf")
+                .withColumnFields(new Fields("word"))
+                .withCounterFields(new Fields("count"))
+                .withRowKeyField("word");
+
+        HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();
+
+        HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
+        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
+
+        HBaseState.Options options = new HBaseState.Options()
+                .withConfigKey(hbaseRoot)   // note: args[0] is used as the config *key*, not as the HBase root dir value
+                .withDurability(Durability.SYNC_WAL)
+                .withMapper(tridentHBaseMapper)
+                .withProjectionCriteria(projectionCriteria)
+                .withRowToStormValueMapper(rowToStormValueMapper)
+                .withTableName("WordCount");
+
+        StateFactory factory = new HBaseStateFactory(options);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        stream.partitionPersist(factory, fields,  new HBaseUpdater(), new Fields());
+
+        TridentState state = topology.newStaticState(factory);
+        stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(), new Fields("columnName","columnValue"));
+        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (args.length == 1) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("wordCounter");
+            cluster.shutdown();
+            System.exit(0);
+        }
+        else if(args.length == 2) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology(args[1], conf, buildTopology(args[0]));
+        } else{
+            System.out.println("Usage: TridentFileTopology <hdfs url> [topology name]");
+        }
+    }
+
+}
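
Note that buildTopology passes args[0] to withConfigKey, so HBaseState looks up
its HBase settings in the topology config under that key, analogous to the
"hbase.conf" map used by HBaseBolt in PersistentWordCount. A hedged sketch of
the extra wiring main() would then need (the key name is whatever you pass as
args[0]):

    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.put(args[0], new HashMap<String, Object>()); // assumed: an empty map defers to hbase-site.xml on the classpath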

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/pom.xml b/examples/storm-hdfs-examples/pom.xml
new file mode 100644
index 0000000..0214ce1
--- /dev/null
+++ b/examples/storm-hdfs-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-hdfs-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-hdfs</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
new file mode 100644
index 0000000..b1ae542
--- /dev/null
+++ b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/HdfsFileTopology.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hdfs.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.OutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichBolt;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat;
+import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat;
+import org.apache.storm.hdfs.bolt.format.FileNameFormat;
+import org.apache.storm.hdfs.bolt.format.RecordFormat;
+import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
+import org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy;
+import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
+import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
+import org.apache.storm.hdfs.common.rotation.MoveFileAction;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HdfsFileTopology {
+    static final String SENTENCE_SPOUT_ID = "sentence-spout";
+    static final String BOLT_ID = "my-bolt";
+    static final String TOPOLOGY_NAME = "test-topology";
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+        config.setNumWorkers(1);
+
+        SentenceSpout spout = new SentenceSpout();
+
+        // sync the filesystem after every 1k tuples
+        SyncPolicy syncPolicy = new CountSyncPolicy(1000);
+
+        // rotate files every minute
+        FileRotationPolicy rotationPolicy = new TimedRotationPolicy(1.0f, TimedRotationPolicy.TimeUnit.MINUTES);
+
+        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
+                .withPath("/tmp/foo/")
+                .withExtension(".txt");
+
+        // use "|" instead of "," for field delimiter
+        RecordFormat format = new DelimitedRecordFormat()
+                .withFieldDelimiter("|");
+
+        if (args.length < 2) {
+            System.out.println("Usage: HdfsFileTopology <hdfs url> <hdfs yaml config file> [topology name]");
+            System.exit(1);
+        }
+
+        Yaml yaml = new Yaml();
+        InputStream in = new FileInputStream(args[1]);
+        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
+        in.close();
+        config.put("hdfs.config", yamlConf);
+
+        HdfsBolt bolt = new HdfsBolt()
+                .withConfigKey("hdfs.config")
+                .withFsUrl(args[0])
+                .withFileNameFormat(fileNameFormat)
+                .withRecordFormat(format)
+                .withRotationPolicy(rotationPolicy)
+                .withSyncPolicy(syncPolicy)
+                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest2/"));
+
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(SENTENCE_SPOUT_ID, spout, 1);
+        // SentenceSpout --> MyBolt
+        builder.setBolt(BOLT_ID, bolt, 4)
+                .shuffleGrouping(SENTENCE_SPOUT_ID);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+
+            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+            waitForSeconds(120);
+            cluster.killTopology(TOPOLOGY_NAME);
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else{
+            System.out.println("Usage: HdfsFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
+        }
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
+        }
+    }
+
+    public static class SentenceSpout extends BaseRichSpout {
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String[] sentences = {
+                "my dog has fleas",
+                "i like cold beverages",
+                "the dog ate my homework",
+                "don't have a cow man",
+                "i don't think i like fleas"
+        };
+        private int index = 0;
+        private int count = 0;
+        private long total = 0L;
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("sentence", "timestamp"));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+        }
+
+        public void nextTuple() {
+            Values values = new Values(sentences[index], System.currentTimeMillis());
+            UUID msgId = UUID.randomUUID();
+            this.pending.put(msgId, values);
+            this.collector.emit(values, msgId);
+            index++;
+            if (index >= sentences.length) {
+                index = 0;
+            }
+            count++;
+            total++;
+            if(count > 20000){
+                count = 0;
+                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+            }
+            Thread.yield();
+        }
+
+        public void ack(Object msgId) {
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+
+    public static class MyBolt extends BaseRichBolt {
+
+        private HashMap<String, Long> counts = null;
+        private OutputCollector collector;
+
+        public void prepare(Map config, TopologyContext context, OutputCollector collector) {
+            this.counts = new HashMap<String, Long>();
+            this.collector = collector;
+        }
+
+        public void execute(Tuple tuple) {
+            collector.ack(tuple);
+        }
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            // this bolt does not emit anything
+        }
+
+        @Override
+        public void cleanup() {
+        }
+    }
+}
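
The topology above rotates its output on time, while SequenceFileTopology below
rotates on size; both policies appear verbatim in these examples. Side by side:

    // rotate on a fixed interval, regardless of file size
    FileRotationPolicy byTime = new TimedRotationPolicy(1.0f, TimedRotationPolicy.TimeUnit.MINUTES);
    // rotate once the open file reaches a size threshold
    FileRotationPolicy bySize = new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);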

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
new file mode 100644
index 0000000..86bc698
--- /dev/null
+++ b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/bolt/SequenceFileTopology.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hdfs.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.OutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichBolt;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.hdfs.bolt.format.*;
+import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
+import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
+import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
+import org.apache.storm.hdfs.common.rotation.MoveFileAction;
+
+import org.apache.hadoop.io.SequenceFile;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class SequenceFileTopology {
+    static final String SENTENCE_SPOUT_ID = "sentence-spout";
+    static final String BOLT_ID = "my-bolt";
+    static final String TOPOLOGY_NAME = "test-topology";
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+        config.setNumWorkers(1);
+
+        SentenceSpout spout = new SentenceSpout();
+
+        // sync the filesystem after every 1k tuples
+        SyncPolicy syncPolicy = new CountSyncPolicy(1000);
+
+        // rotate files when they reach 5MB
+        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, Units.MB);
+
+        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
+                .withPath("/tmp/source/")
+                .withExtension(".seq");
+
+        // create sequence format instance.
+        DefaultSequenceFormat format = new DefaultSequenceFormat("timestamp", "sentence");
+
+        if (args.length < 2) {
+            System.out.println("Usage: SequenceFileTopology <hdfs url> <hdfs yaml config file> [topology name]");
+            System.exit(1);
+        }
+
+        Yaml yaml = new Yaml();
+        InputStream in = new FileInputStream(args[1]);
+        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
+        in.close();
+        config.put("hdfs.config", yamlConf);
+
+        SequenceFileBolt bolt = new SequenceFileBolt()
+                .withFsUrl(args[0])
+                .withConfigKey("hdfs.config")
+                .withFileNameFormat(fileNameFormat)
+                .withSequenceFormat(format)
+                .withRotationPolicy(rotationPolicy)
+                .withSyncPolicy(syncPolicy)
+                .withCompressionType(SequenceFile.CompressionType.RECORD)
+                .withCompressionCodec("deflate")
+                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest/"));
+
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(SENTENCE_SPOUT_ID, spout, 1);
+        // SentenceSpout --> MyBolt
+        builder.setBolt(BOLT_ID, bolt, 4)
+                .shuffleGrouping(SENTENCE_SPOUT_ID);
+
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+
+            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+            waitForSeconds(120);
+            cluster.killTopology(TOPOLOGY_NAME);
+            cluster.shutdown();
+            System.exit(0);
+        } else if(args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else{
+            System.out.println("Usage: SequenceFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
+        }
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
+        }
+    }
+
+
+    public static class SentenceSpout extends BaseRichSpout {
+
+
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String[] sentences = {
+                "my dog has fleas",
+                "i like cold beverages",
+                "the dog ate my homework",
+                "don't have a cow man",
+                "i don't think i like fleas"
+        };
+        private int index = 0;
+        private int count = 0;
+        private long total = 0L;
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("sentence", "timestamp"));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+        }
+
+        public void nextTuple() {
+            Values values = new Values(sentences[index], System.currentTimeMillis());
+            UUID msgId = UUID.randomUUID();
+            this.pending.put(msgId, values);
+            this.collector.emit(values, msgId);
+            index++;
+            if (index >= sentences.length) {
+                index = 0;
+            }
+            count++;
+            total++;
+            if(count > 20000){
+                count = 0;
+                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+            }
+            Thread.yield();
+        }
+
+        public void ack(Object msgId) {
+//            System.out.println("ACK");
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+
+
+    public static class MyBolt extends BaseRichBolt {
+
+        private HashMap<String, Long> counts = null;
+        private OutputCollector collector;
+
+        public void prepare(Map config, TopologyContext context, OutputCollector collector) {
+            this.counts = new HashMap<String, Long>();
+            this.collector = collector;
+        }
+
+        public void execute(Tuple tuple) {
+            collector.ack(tuple);
+        }
+
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            // this bolt does not emit anything
+        }
+
+        @Override
+        public void cleanup() {
+        }
+    }
+}
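
A hedged sketch of reading back one of the rotated .seq files with the plain
Hadoop client. The filesystem URL and file name are illustrative only; the
key/value classes are read from the file header, so the reader does not need
to know them up front:

    Configuration hadoopConf = new Configuration();
    FileSystem fs = FileSystem.get(java.net.URI.create("hdfs://localhost:8020"), hadoopConf); // example URL
    Path path = new Path("/tmp/dest/rotated-file.seq"); // hypothetical file name
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, hadoopConf);
    try {
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hadoopConf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), hadoopConf);
        while (reader.next(key, value)) {
            System.out.println(key + "\t" + value);
        }
    } finally {
        reader.close();
    }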

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
new file mode 100644
index 0000000..76cc2aa
--- /dev/null
+++ b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/FixedBatchSpout.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hdfs.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.spout.IBatchSpout;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class FixedBatchSpout implements IBatchSpout {
+
+    Fields fields;
+    List<Object>[] outputs;
+    int maxBatchSize;
+    HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
+
+    public FixedBatchSpout(Fields fields, int maxBatchSize, List<Object>... outputs) {
+        this.fields = fields;
+        this.outputs = outputs;
+        this.maxBatchSize = maxBatchSize;
+    }
+
+    int index = 0;
+    boolean cycle = false;
+
+    public void setCycle(boolean cycle) {
+        this.cycle = cycle;
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context) {
+        index = 0;
+    }
+
+    @Override
+    public void emitBatch(long batchId, TridentCollector collector) {
+        // cache each batch by id so that a replayed batchId re-emits exactly the same tuples
+        List<List<Object>> batch = this.batches.get(batchId);
+        if(batch == null){
+            batch = new ArrayList<List<Object>>();
+            if(index>=outputs.length && cycle) {
+                index = 0;
+            }
+            for(int i=0; i < maxBatchSize; index++, i++) {
+                if(index == outputs.length){
+                    index=0;
+                }
+                batch.add(outputs[index]);
+            }
+            this.batches.put(batchId, batch);
+        }
+        for(List<Object> list : batch){
+            collector.emit(list);
+        }
+    }
+
+    @Override
+    public void ack(long batchId) {
+        this.batches.remove(batchId);
+    }
+
+    @Override
+    public void close() {
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        Config conf = new Config();
+        conf.setMaxTaskParallelism(1);
+        return conf;
+    }
+
+    @Override
+    public Fields getOutputFields() {
+        return fields;
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentFileTopology.java b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
new file mode 100644
index 0000000..8f75c45
--- /dev/null
+++ b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hdfs.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.hdfs.common.rotation.MoveFileAction;
+import org.apache.storm.hdfs.trident.format.*;
+import org.apache.storm.hdfs.trident.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.trident.rotation.FileSizeRotationPolicy;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.Map;
+
+public class TridentFileTopology {
+
+    public static StormTopology buildTopology(String hdfsUrl){
+        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence", "key"), 1000, new Values("the cow jumped over the moon", 1l),
+                new Values("the man went to the store and bought some candy", 2l), new Values("four score and seven years ago", 3l),
+                new Values("how many apples can you eat", 4l), new Values("to be or not to be the person", 5l));
+        spout.setCycle(true);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        Fields hdfsFields = new Fields("sentence", "key");
+
+        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
+                .withPath("/tmp/trident")
+                .withPrefix("trident")
+                .withExtension(".txt");
+
+        RecordFormat recordFormat = new DelimitedRecordFormat()
+                .withFields(hdfsFields);
+
+        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);
+
+        HdfsState.Options options = new HdfsState.HdfsFileOptions()
+                .withFileNameFormat(fileNameFormat)
+                .withRecordFormat(recordFormat)
+                .withRotationPolicy(rotationPolicy)
+                .withFsUrl(hdfsUrl)
+                .withConfigKey("hdfs.config");
+
+        StateFactory factory = new HdfsStateFactory().withOptions(options);
+
+        TridentState state = stream
+                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());
+
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+
+        if (args.length < 2) {
+            System.out.println("Usage: TridentFileTopology <hdfs url> <hdfs yaml config file> [topology name]");
+            System.exit(1);
+        }
+
+        Yaml yaml = new Yaml();
+        InputStream in = new FileInputStream(args[1]);
+        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
+        in.close();
+        conf.put("hdfs.config", yamlConf);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
+            Thread.sleep(120 * 1000);
+            cluster.killTopology("wordCounter");
+            cluster.shutdown();
+        } else if(args.length == 3) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0]));
+        } else{
+            System.out.println("Usage: TridentFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
+        }
+    }
+}
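
The file passed as args[1] is plain YAML that snakeyaml loads into a Map, which
the topology then stores under "hdfs.config". An equivalent in-code wiring,
with purely illustrative entries (the actual keys depend on your HDFS security
setup):

    Map<String, Object> yamlConf = new HashMap<String, Object>();
    yamlConf.put("hdfs.keytab.file", "/path/to/storm.keytab");    // illustrative
    yamlConf.put("hdfs.kerberos.principal", "storm@EXAMPLE.COM"); // illustrative
    conf.put("hdfs.config", yamlConf);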


[03/10] storm git commit: STORM-1970: external project examples refator

Posted by ka...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentFileTopology.java b/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
deleted file mode 100644
index 8f75c45..0000000
--- a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentFileTopology.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hdfs.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.hdfs.common.rotation.MoveFileAction;
-import org.apache.storm.hdfs.trident.format.*;
-import org.apache.storm.hdfs.trident.rotation.FileRotationPolicy;
-import org.apache.storm.hdfs.trident.rotation.FileSizeRotationPolicy;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.Map;
-
-public class TridentFileTopology {
-
-    public static StormTopology buildTopology(String hdfsUrl){
-        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence", "key"), 1000, new Values("the cow jumped over the moon", 1l),
-                new Values("the man went to the store and bought some candy", 2l), new Values("four score and seven years ago", 3l),
-                new Values("how many apples can you eat", 4l), new Values("to be or not to be the person", 5l));
-        spout.setCycle(true);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        Fields hdfsFields = new Fields("sentence", "key");
-
-        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
-                .withPath("/tmp/trident")
-                .withPrefix("trident")
-                .withExtension(".txt");
-
-        RecordFormat recordFormat = new DelimitedRecordFormat()
-                .withFields(hdfsFields);
-
-        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);
-
-        HdfsState.Options options = new HdfsState.HdfsFileOptions()
-                .withFileNameFormat(fileNameFormat)
-                .withRecordFormat(recordFormat)
-                .withRotationPolicy(rotationPolicy)
-                .withFsUrl(hdfsUrl)
-                .withConfigKey("hdfs.config");
-
-        StateFactory factory = new HdfsStateFactory().withOptions(options);
-
-        TridentState state = stream
-                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());
-
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-
-        Yaml yaml = new Yaml();
-        InputStream in = new FileInputStream(args[1]);
-        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
-        in.close();
-        conf.put("hdfs.config", yamlConf);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
-            Thread.sleep(120 * 1000);
-        } else if(args.length == 3) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0]));
-        } else{
-            System.out.println("Usage: TridentFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java b/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
deleted file mode 100644
index 788b33c..0000000
--- a/external/storm-hdfs/src/test/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hdfs.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.hdfs.common.rotation.MoveFileAction;
-import org.apache.storm.hdfs.trident.format.*;
-import org.apache.storm.hdfs.trident.rotation.FileRotationPolicy;
-import org.apache.storm.hdfs.trident.rotation.FileSizeRotationPolicy;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.Map;
-
-public class TridentSequenceTopology {
-
-    public static StormTopology buildTopology(String hdfsUrl){
-        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence", "key"), 1000, new Values("the cow jumped over the moon", 1l),
-                new Values("the man went to the store and bought some candy", 2l), new Values("four score and seven years ago", 3l),
-                new Values("how many apples can you eat", 4l), new Values("to be or not to be the person", 5l));
-        spout.setCycle(true);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        Fields hdfsFields = new Fields("sentence", "key");
-
-        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
-                .withPath("/tmp/trident")
-                .withPrefix("trident")
-                .withExtension(".seq");
-
-        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);
-
-        HdfsState.Options seqOpts = new HdfsState.SequenceFileOptions()
-                .withFileNameFormat(fileNameFormat)
-                .withSequenceFormat(new DefaultSequenceFormat("key", "sentence"))
-                .withRotationPolicy(rotationPolicy)
-                .withFsUrl(hdfsUrl)
-                .withConfigKey("hdfs.config")
-                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest2/"));
-        StateFactory factory = new HdfsStateFactory().withOptions(seqOpts);
-
-        TridentState state = stream
-                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());
-
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-
-        Yaml yaml = new Yaml();
-        InputStream in = new FileInputStream(args[1]);
-        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
-        in.close();
-        conf.put("hdfs.config", yamlConf);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
-            Thread.sleep(120 * 1000);
-        } else if(args.length == 3) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0]));
-        } else{
-            System.out.println("Usage: TridentSequenceTopology [hdfs url] [hdfs yaml config file] <topology name>");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java b/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
deleted file mode 100644
index 607bd61..0000000
--- a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.hive.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.utils.MockTupleHelpers;
-
-import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
-import org.apache.storm.hive.common.HiveOptions;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-
-public class BucketTestHiveTopology {
-    static final String USER_SPOUT_ID = "user-spout";
-    static final String BOLT_ID = "my-hive-bolt";
-    static final String TOPOLOGY_NAME = "hive-test-topology1";
-
-    public static void main(String[] args) throws Exception {
-        if ((args == null) || (args.length < 7)) {
-            System.out.println("Usage: BucketTestHiveTopology metastoreURI "
-                    + "dbName tableName dataFileLocation hiveBatchSize "
-                    + "hiveTickTupleIntervalSecs workers [topologyName] [keytab file]"
-                    + " [principal name]");
-            System.exit(1);
-        }
-        String metaStoreURI = args[0];
-        String dbName = args[1];
-        String tblName = args[2];
-        String sourceFileLocation = args[3];
-        Integer hiveBatchSize = Integer.parseInt(args[4]);
-        Integer hiveTickTupleIntervalSecs = Integer.parseInt(args[5]);
-        Integer workers = Integer.parseInt(args[6]);
-        String[] colNames = { "ss_sold_date_sk", "ss_sold_time_sk", "ss_item_sk",
-                "ss_customer_sk", "ss_cdemo_sk", "ss_hdemo_sk", "ss_addr_sk",
-                "ss_store_sk", "ss_promo_sk", "ss_ticket_number", "ss_quantity",
-                "ss_wholesale_cost", "ss_list_price", "ss_sales_price",
-                "ss_ext_discount_amt", "ss_ext_sales_price",
-                "ss_ext_wholesale_cost", "ss_ext_list_price", "ss_ext_tax",
-                "ss_coupon_amt", "ss_net_paid", "ss_net_paid_inc_tax",
-                "ss_net_profit" };
-        Config config = new Config();
-        config.setNumWorkers(workers);
-        UserDataSpout spout = new UserDataSpout().withDataFile(sourceFileLocation);
-        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
-                .withColumnFields(new Fields(colNames)).withTimeAsPartitionField("yyyy/MM/dd");
-        HiveOptions hiveOptions;
-        hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-            .withTxnsPerBatch(10)
-            .withBatchSize(hiveBatchSize);
-        // the tick tuple interval most likely affects the Storm metrics, hence the setting below;
-        // it had to be a mandatory argument since the arguments are positional
-        if (hiveTickTupleIntervalSecs > 0) {
-            hiveOptions.withTickTupleInterval(hiveTickTupleIntervalSecs);
-        }
-        if (args.length == 10) {
-            hiveOptions.withKerberosKeytab(args[8]).withKerberosPrincipal(args[9]);
-        }
-        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(USER_SPOUT_ID, spout, 1);
-        // UserDataSpout --> HiveBolt
-        builder.setBolt(BOLT_ID, hiveBolt, 14)
-                .shuffleGrouping(USER_SPOUT_ID);
-        // exactly the seven mandatory arguments means run on a local cluster
-        if (args.length == 7) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-            waitForSeconds(20);
-            cluster.killTopology(TOPOLOGY_NAME);
-            System.out.println("cluster begin to shutdown");
-            cluster.shutdown();
-            System.out.println("cluster shutdown");
-            System.exit(0);
-        } else {
-            StormSubmitter.submitTopology(args[7], config, builder.createTopology());
-        }
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-            // restore the interrupt flag instead of swallowing the exception
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    public static class UserDataSpout extends BaseRichSpout {
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String filePath;
-        private BufferedReader br;
-        private int count = 0;
-        private long total = 0L;
-        private String[] outputFields = { "ss_sold_date_sk", "ss_sold_time_sk",
-                "ss_item_sk", "ss_customer_sk", "ss_cdemo_sk", "ss_hdemo_sk",
-                "ss_addr_sk", "ss_store_sk", "ss_promo_sk", "ss_ticket_number",
-                "ss_quantity", "ss_wholesale_cost", "ss_list_price",
-                "ss_sales_price", "ss_ext_discount_amt", "ss_ext_sales_price",
-                "ss_ext_wholesale_cost", "ss_ext_list_price", "ss_ext_tax",
-                "ss_coupon_amt", "ss_net_paid", "ss_net_paid_inc_tax",
-                "ss_net_profit" };
-
-        public UserDataSpout withDataFile (String filePath) {
-            this.filePath = filePath;
-            return this;
-        }
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields(this.outputFields));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
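-            // track in-flight tuples by message id so fail() can re-emit them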
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-            try {
-                this.br = new BufferedReader(new FileReader(new File(this
-                        .filePath)));
-            } catch (Exception ex) {
-                ex.printStackTrace();
-            }
-        }
-
-        public void nextTuple() {
-            String line;
-            try {
-                if ((line = br.readLine()) != null) {
-                    System.out.println("*********" + line);
-                    String[] values = line.split("\\|", -1);
-                    // splitting with limit -1 keeps a trailing empty string;
-                    // trim the array down to the expected number of columns
-                    values = Arrays.copyOfRange(values, 0,
-                            this.outputFields.length);
-                    Values tupleValues = new Values(values);
-                    UUID msgId = UUID.randomUUID();
-                    this.pending.put(msgId, tupleValues);
-                    this.collector.emit(tupleValues, msgId);
-                    count++;
-                    total++;
-                    if (count > 1000) {
-                        count = 0;
-                        System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-                    }
-                }
-            } catch (IOException ex) {
-                ex.printStackTrace();
-            }
-        }
-
-        public void ack(Object msgId) {
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopology.java b/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopology.java
deleted file mode 100644
index 4afd298..0000000
--- a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopology.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.hive.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
-import org.apache.storm.hive.common.HiveOptions;
-
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-
-public class HiveTopology {
-    static final String USER_SPOUT_ID = "user-spout";
-    static final String BOLT_ID = "my-hive-bolt";
-    static final String TOPOLOGY_NAME = "hive-test-topology1";
-
-    public static void main(String[] args) throws Exception {
-        if (args.length < 3) {
-            System.out.println("Usage: HiveTopology metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
-            System.exit(1);
-        }
-        String metaStoreURI = args[0];
-        String dbName = args[1];
-        String tblName = args[2];
-        String[] colNames = {"id","name","phone","street","city","state"};
-        Config config = new Config();
-        config.setNumWorkers(1);
-        UserDataSpout spout = new UserDataSpout();
-        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
-                .withColumnFields(new Fields(colNames));
-        HiveOptions hiveOptions;
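-        // with six arguments the last two are the Kerberos keytab and principal for a secure metastore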
-        if (args.length == 6) {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(100)
-                .withIdleTimeout(10)
-                .withKerberosKeytab(args[4])
-                .withKerberosPrincipal(args[5]);
-        } else {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(100)
-                .withIdleTimeout(10)
-                .withMaxOpenConnections(1);
-        }
-
-        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(USER_SPOUT_ID, spout, 1);
-        // UserDataSpout --> HiveBolt
-        builder.setBolt(BOLT_ID, hiveBolt, 1)
-                .shuffleGrouping(USER_SPOUT_ID);
-        if (args.length == 3) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-            waitForSeconds(20);
-            cluster.killTopology(TOPOLOGY_NAME);
-            System.out.println("cluster begin to shutdown");
-            cluster.shutdown();
-            System.out.println("cluster shutdown");
-            System.exit(0);
-        } else {
-            StormSubmitter.submitTopology(args[3], config, builder.createTopology());
-        }
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-            // restore the interrupt flag instead of swallowing the exception
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    public static class UserDataSpout extends BaseRichSpout {
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String[] sentences = {
-                "1,user1,123456,street1,sunnyvale,ca",
-                "2,user2,123456,street2,sunnyvale,ca",
-                "3,user3,123456,street3,san jose,ca",
-                "4,user4,123456,street4,san jose,ca",
-        };
-        private int index = 0;
-        private int count = 0;
-        private long total = 0L;
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("id","name","phone","street","city","state"));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-        }
-
-        public void nextTuple() {
-            String[] user = sentences[index].split(",");
-            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
-            UUID msgId = UUID.randomUUID();
-            this.pending.put(msgId, values);
-            this.collector.emit(values, msgId);
-            index++;
-            if (index >= sentences.length) {
-                index = 0;
-            }
-            count++;
-            total++;
-            if(count > 1000){
-                count = 0;
-                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-            }
-            Thread.yield();
-        }
-
-        public void ack(Object msgId) {
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
----------------------------------------------------------------------
diff --git a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java b/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
deleted file mode 100644
index a52c490..0000000
--- a/external/storm-hive/src/test/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.hive.bolt;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.utils.Utils;
-
-import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
-import org.apache.storm.hive.common.HiveOptions;
-
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-
-public class HiveTopologyPartitioned {
-    static final String USER_SPOUT_ID = "hive-user-spout-partitioned";
-    static final String BOLT_ID = "my-hive-bolt-partitioned";
-    static final String TOPOLOGY_NAME = "hive-test-topology-partitioned";
-
-    public static void main(String[] args) throws Exception {
-        if (args.length < 3) {
-            System.out.println("Usage: HiveTopologyPartitioned metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
-            System.exit(1);
-        }
-        String metaStoreURI = args[0];
-        String dbName = args[1];
-        String tblName = args[2];
-        String[] partNames = {"city","state"};
-        String[] colNames = {"id","name","phone","street"};
-        Config config = new Config();
-        config.setNumWorkers(1);
-        UserDataSpout spout = new UserDataSpout();
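-        // city and state become Hive partition columns; the remaining fields are regular columns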
-        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
-            .withColumnFields(new Fields(colNames))
-            .withPartitionFields(new Fields(partNames));
-        HiveOptions hiveOptions;
-        if (args.length == 6) {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(1000)
-                .withIdleTimeout(10)
-                .withKerberosKeytab(args[4])
-                .withKerberosPrincipal(args[5]);
-        } else {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(1000)
-                .withIdleTimeout(10);
-        }
-
-        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(USER_SPOUT_ID, spout, 1);
-        // UserDataSpout --> HiveBolt
-        builder.setBolt(BOLT_ID, hiveBolt, 1)
-                .shuffleGrouping(USER_SPOUT_ID);
-        if (args.length == 3) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
-            waitForSeconds(20);
-            cluster.killTopology(TOPOLOGY_NAME);
-            System.out.println("cluster begin to shutdown");
-            cluster.shutdown();
-            System.out.println("cluster shutdown");
-            System.exit(0);
-        } else {
-            StormSubmitter.submitTopology(args[3], config, builder.createTopology());
-        }
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-            // restore the interrupt flag instead of swallowing the exception
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    public static class UserDataSpout extends BaseRichSpout {
-        private ConcurrentHashMap<UUID, Values> pending;
-        private SpoutOutputCollector collector;
-        private String[] sentences = {
-                "1,user1,123456,street1,sunnyvale,ca",
-                "2,user2,123456,street2,sunnyvale,ca",
-                "3,user3,123456,street3,san jose,ca",
-                "4,user4,123456,street4,san jose,ca",
-        };
-        private int index = 0;
-        private int count = 0;
-        private long total = 0L;
-
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("id","name","phone","street","city","state"));
-        }
-
-        public void open(Map config, TopologyContext context,
-                         SpoutOutputCollector collector) {
-            this.collector = collector;
-            this.pending = new ConcurrentHashMap<UUID, Values>();
-        }
-
-        public void nextTuple() {
-            String[] user = sentences[index].split(",");
-            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
-            UUID msgId = UUID.randomUUID();
-            this.pending.put(msgId, values);
-            this.collector.emit(values, msgId);
-            index++;
-            if (index >= sentences.length) {
-                index = 0;
-            }
-            count++;
-            total++;
-            if (count > 1000) {
-                Utils.sleep(1000);
-                count = 0;
-                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
-            }
-        }
-
-        public void ack(Object msgId) {
-            this.pending.remove(msgId);
-        }
-
-        public void fail(Object msgId) {
-            System.out.println("**** RESENDING FAILED TUPLE");
-            this.collector.emit(this.pending.get(msgId), msgId);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-hive/src/test/java/org/apache/storm/hive/trident/TridentHiveTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-hive/src/test/java/org/apache/storm/hive/trident/TridentHiveTopology.java b/external/storm-hive/src/test/java/org/apache/storm/hive/trident/TridentHiveTopology.java
deleted file mode 100644
index 86a35e6..0000000
--- a/external/storm-hive/src/test/java/org/apache/storm/hive/trident/TridentHiveTopology.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.hive.trident;
-
-
-import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
-import org.apache.storm.hive.common.HiveOptions;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.hooks.SubmitterHookException;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.spout.IBatchSpout;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class TridentHiveTopology {
-    private static final Logger LOG = LoggerFactory.getLogger(TridentHiveTopology.class);
-
-    public static StormTopology buildTopology(String metaStoreURI, String dbName, String tblName, Object keytab, Object principal) {
-        int batchSize = 100;
-        FixedBatchSpout spout = new FixedBatchSpout(batchSize);
-        spout.setCycle(true);
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("hiveTridentspout1",spout);
-        String[] partNames = {"city","state"};
-        String[] colNames = {"id","name","phone","street"};
-        Fields hiveFields = new Fields("id","name","phone","street","city","state");
-        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
-            .withColumnFields(new Fields(colNames))
-            .withPartitionFields(new Fields(partNames));
-        HiveOptions hiveOptions;
-        if (keytab != null && principal != null) {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(batchSize)
-                .withIdleTimeout(10)
-                .withCallTimeout(30000)
-                .withKerberosKeytab((String)keytab)
-                .withKerberosPrincipal((String)principal);
-        } else  {
-            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
-                .withTxnsPerBatch(10)
-                .withBatchSize(batchSize)
-                .withCallTimeout(30000)
-                .withIdleTimeout(10);
-        }
-        StateFactory factory = new HiveStateFactory().withOptions(hiveOptions);
-        TridentState state = stream.partitionPersist(factory, hiveFields, new HiveUpdater(), new Fields());
-        return topology.build();
-    }
-
-    public static void waitForSeconds(int seconds) {
-        try {
-            Thread.sleep(seconds * 1000);
-        } catch (InterruptedException e) {
-            // restore the interrupt flag instead of swallowing the exception
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    public static void main(String[] args) {
-        if (args.length < 3) {
-            LOG.info("Usage: TridentHiveTopology metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
-            System.exit(1);
-        }
-        String metaStoreURI = args[0];
-        String dbName = args[1];
-        String tblName = args[2];
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if(args.length == 3) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("tridentHiveTopology", conf, buildTopology(metaStoreURI, dbName, tblName,null,null));
-            LOG.info("waiting for 60 seconds");
-            waitForSeconds(60);
-            LOG.info("killing topology");
-            cluster.killTopology("tridenHiveTopology");
-            LOG.info("cluster shutdown");
-            cluster.shutdown();
-            LOG.info("cluster shutdown");
-            System.exit(0);
-        } else if(args.length == 4) {
-            try {
-                StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreURI, dbName, tblName,null,null));
-            } catch(SubmitterHookException e) {
-                LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
-            } catch (Exception e) {
-                LOG.warn("Failed to submit topology ", e);
-            }
-        } else if (args.length == 6) {
-            try {
-                StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreURI, dbName, tblName,args[4],args[5]));
-            } catch(SubmitterHookException e) {
-                LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
-            } catch (Exception e) {
-                LOG.warn("Failed to submit topology ", e);
-            }
-        } else {
-            LOG.info("Usage: TridentHiveTopology metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
-        }
-    }
-
-    public static class FixedBatchSpout implements IBatchSpout {
-        int maxBatchSize;
-        HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
-        private Values[] outputs = {
-            new Values("1","user1","123456","street1","sunnyvale","ca"),
-            new Values("2","user2","123456","street2","sunnyvale","ca"),
-            new Values("3","user3","123456","street3","san jose","ca"),
-            new Values("4","user4","123456","street4","san jose","ca"),
-        };
-        private int index = 0;
-        boolean cycle = false;
-
-        public FixedBatchSpout(int maxBatchSize) {
-            this.maxBatchSize = maxBatchSize;
-        }
-
-        public void setCycle(boolean cycle) {
-            this.cycle = cycle;
-        }
-
-        @Override
-        public Fields getOutputFields() {
-            return new Fields("id","name","phone","street","city","state");
-        }
-
-        @Override
-        public void open(Map conf, TopologyContext context) {
-            index = 0;
-        }
-
-        @Override
-        public void emitBatch(long batchId, TridentCollector collector) {
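-            // cache each batch by id so a replayed batchId re-emits exactly the same tuples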
-            List<List<Object>> batch = this.batches.get(batchId);
-            if(batch == null){
-                batch = new ArrayList<List<Object>>();
-                if(index>=outputs.length && cycle) {
-                    index = 0;
-                }
-                for(int i=0; i < maxBatchSize; index++, i++) {
-                    if(index == outputs.length){
-                        index=0;
-                    }
-                    batch.add(outputs[index]);
-                }
-                this.batches.put(batchId, batch);
-            }
-            for(List<Object> list : batch){
-                collector.emit(list);
-            }
-        }
-
-        @Override
-        public void ack(long batchId) {
-            this.batches.remove(batchId);
-        }
-
-        @Override
-        public void close() {
-        }
-
-        @Override
-        public Map getComponentConfiguration() {
-            Config conf = new Config();
-            conf.setMaxTaskParallelism(1);
-            return conf;
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/spout/UserSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/spout/UserSpout.java b/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/spout/UserSpout.java
deleted file mode 100644
index fdcd053..0000000
--- a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/spout/UserSpout.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.jdbc.spout;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Lists;
-
-import java.util.*;
-
-public class UserSpout implements IRichSpout {
-    boolean isDistributed;
-    SpoutOutputCollector collector;
-    public static final List<Values> rows = Lists.newArrayList(
-            new Values(1,"peter",System.currentTimeMillis()),
-            new Values(2,"bob",System.currentTimeMillis()),
-            new Values(3,"alice",System.currentTimeMillis()));
-
-    public UserSpout() {
-        this(true);
-    }
-
-    public UserSpout(boolean isDistributed) {
-        this.isDistributed = isDistributed;
-    }
-
-    public boolean isDistributed() {
-        return this.isDistributed;
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    public void close() {
-
-    }
-
-    public void nextTuple() {
-        final Random rand = new Random();
-        // the bound must be rows.size(), not rows.size() - 1, or the last row is never emitted
-        final Values row = rows.get(rand.nextInt(rows.size()));
-        this.collector.emit(row);
-        Thread.yield();
-    }
-
-    public void ack(Object msgId) {
-
-    }
-
-    public void fail(Object msgId) {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("user_id","user_name","create_date"));
-    }
-
-    @Override
-    public void activate() {
-    }
-
-    @Override
-    public void deactivate() {
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java b/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
deleted file mode 100644
index ec7ca36..0000000
--- a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.jdbc.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.apache.storm.jdbc.common.Column;
-import org.apache.storm.jdbc.common.ConnectionProvider;
-import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
-import org.apache.storm.jdbc.common.JdbcClient;
-import org.apache.storm.jdbc.mapper.JdbcMapper;
-import org.apache.storm.jdbc.mapper.JdbcLookupMapper;
-import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
-import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
-import org.apache.storm.jdbc.spout.UserSpout;
-import org.apache.storm.LocalCluster;
-
-import java.sql.Types;
-import java.util.List;
-import java.util.Map;
-
-public abstract class AbstractUserTopology {
-    private static final List<String> setupSqls = Lists.newArrayList(
-            "drop table if exists user",
-            "drop table if exists department",
-            "drop table if exists user_department",
-            "create table if not exists user (user_id integer, user_name varchar(100), dept_name varchar(100), create_date date)",
-            "create table if not exists department (dept_id integer, dept_name varchar(100))",
-            "create table if not exists user_department (user_id integer, dept_id integer)",
-            "insert into department values (1, 'R&D')",
-            "insert into department values (2, 'Finance')",
-            "insert into department values (3, 'HR')",
-            "insert into department values (4, 'Sales')",
-            "insert into user_department values (1, 1)",
-            "insert into user_department values (2, 2)",
-            "insert into user_department values (3, 3)",
-            "insert into user_department values (4, 4)"
-    );
-    protected UserSpout userSpout;
-    protected JdbcMapper jdbcMapper;
-    protected JdbcLookupMapper jdbcLookupMapper;
-    protected ConnectionProvider connectionProvider;
-
-    protected static final String TABLE_NAME = "user";
-    protected static final String JDBC_CONF = "jdbc.conf";
-    protected static final String SELECT_QUERY = "select dept_name from department, user_department where department.dept_id = user_department.dept_id" +
-            " and user_department.user_id = ?";
-
-    public void execute(String[] args) throws Exception {
-        if (args.length != 4 && args.length != 5) {
-            System.out.println("Usage: " + this.getClass().getSimpleName() + " <dataSourceClassName> <dataSource.url> "
-                    + "<user> <password> [topology name]");
-            System.exit(-1);
-        }
-        Map map = Maps.newHashMap();
-        map.put("dataSourceClassName", args[0]); //e.g. com.mysql.jdbc.jdbc2.optional.MysqlDataSource
-        map.put("dataSource.url", args[1]); //e.g. jdbc:mysql://localhost/test
-        map.put("dataSource.user", args[2]); //e.g. root
-
-        // the password is always the fourth argument; a fifth argument, when present, is the topology name
-        map.put("dataSource.password", args[3]);
-
-        Config config = new Config();
-        config.put(JDBC_CONF, map);
-
-        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
-        connectionProvider.prepare();
-        int queryTimeoutSecs = 60;
-        JdbcClient jdbcClient = new JdbcClient(connectionProvider, queryTimeoutSecs);
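-        // create the tables and seed data that the example topologies read and write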
-        for (String sql : setupSqls) {
-            jdbcClient.executeSql(sql);
-        }
-
-        this.userSpout = new UserSpout();
-        this.jdbcMapper = new SimpleJdbcMapper(TABLE_NAME, connectionProvider);
-        connectionProvider.cleanup();
-        Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
-        List<Column> queryParamColumns = Lists.newArrayList(new Column("user_id", Types.INTEGER));
-        this.jdbcLookupMapper = new SimpleJdbcLookupMapper(outputFields, queryParamColumns);
-        this.connectionProvider = new HikariCPConnectionProvider(map);
-        if (args.length == 4) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, getTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else {
-            StormSubmitter.submitTopology(args[4], config, getTopology());
-        }
-    }
-
-    public abstract StormTopology getTopology();
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java b/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
deleted file mode 100644
index 1915219..0000000
--- a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.jdbc.topology;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.topology.TopologyBuilder;
-import com.google.common.collect.Lists;
-import org.apache.storm.jdbc.bolt.JdbcInsertBolt;
-import org.apache.storm.jdbc.bolt.JdbcLookupBolt;
-import org.apache.storm.jdbc.common.Column;
-import org.apache.storm.jdbc.mapper.JdbcMapper;
-import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
-
-import java.sql.Types;
-import java.util.List;
-
-
-public class UserPersistanceTopology extends AbstractUserTopology {
-    private static final String USER_SPOUT = "USER_SPOUT";
-    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
-    private static final String PERSISTANCE_BOLT = "PERSISTANCE_BOLT";
-
-    public static void main(String[] args) throws Exception {
-        new UserPersistanceTopology().execute(args);
-    }
-
-    @Override
-    public StormTopology getTopology() {
-        JdbcLookupBolt departmentLookupBolt = new JdbcLookupBolt(connectionProvider, SELECT_QUERY, this.jdbcLookupMapper);
-
-        //must specify column schema when providing custom query.
-        List<Column> schemaColumns = Lists.newArrayList(new Column("create_date", Types.DATE),
-                new Column("dept_name", Types.VARCHAR), new Column("user_id", Types.INTEGER), new Column("user_name", Types.VARCHAR));
-        JdbcMapper mapper = new SimpleJdbcMapper(schemaColumns);
-
-        JdbcInsertBolt userPersistanceBolt = new JdbcInsertBolt(connectionProvider, mapper)
-                .withInsertQuery("insert into user (create_date, dept_name, user_id, user_name) values (?,?,?,?)");
-
-        // userSpout ==> jdbcBolt
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(USER_SPOUT, this.userSpout, 1);
-        builder.setBolt(LOOKUP_BOLT, departmentLookupBolt, 1).shuffleGrouping(USER_SPOUT);
-        builder.setBolt(PERSISTANCE_BOLT, userPersistanceBolt, 1).shuffleGrouping(LOOKUP_BOLT);
-        return builder.createTopology();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java b/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
deleted file mode 100644
index 11269c3..0000000
--- a/external/storm-jdbc/src/test/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.jdbc.topology;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import com.google.common.collect.Lists;
-import org.apache.storm.jdbc.common.Column;
-import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
-import org.apache.storm.jdbc.spout.UserSpout;
-import org.apache.storm.jdbc.trident.state.JdbcQuery;
-import org.apache.storm.jdbc.trident.state.JdbcState;
-import org.apache.storm.jdbc.trident.state.JdbcStateFactory;
-import org.apache.storm.jdbc.trident.state.JdbcUpdater;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-
-import java.sql.Types;
-
-public class UserPersistanceTridentTopology extends AbstractUserTopology {
-
-    public static void main(String[] args) throws Exception {
-        new UserPersistanceTridentTopology().execute(args);
-    }
-
-    @Override
-    public StormTopology getTopology() {
-        TridentTopology topology = new TridentTopology();
-
-        JdbcState.Options options = new JdbcState.Options()
-                .withConnectionProvider(connectionProvider)
-                .withMapper(this.jdbcMapper)
-                .withJdbcLookupMapper(new SimpleJdbcLookupMapper(new Fields("dept_name"), Lists.newArrayList(new Column("user_id", Types.INTEGER))))
-                .withTableName(TABLE_NAME)
-                .withSelectQuery(SELECT_QUERY);
-
-        JdbcStateFactory jdbcStateFactory = new JdbcStateFactory(options);
-
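-        // enrich each user tuple with dept_name via a state query, then persist the result back through JDBC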
-        Stream stream = topology.newStream("userSpout", new UserSpout());
-        TridentState state = topology.newStaticState(jdbcStateFactory);
-        stream = stream.stateQuery(state, new Fields("user_id","user_name","create_date"), new JdbcQuery(), new Fields("dept_name"));
-        stream.partitionPersist(jdbcStateFactory, new Fields("user_id","user_name","dept_name","create_date"),  new JdbcUpdater(), new Fields());
-        return topology.build();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-kafka/src/test/org/apache/storm/kafka/TridentKafkaTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-kafka/src/test/org/apache/storm/kafka/TridentKafkaTopology.java b/external/storm-kafka/src/test/org/apache/storm/kafka/TridentKafkaTopology.java
deleted file mode 100644
index fdc6752..0000000
--- a/external/storm-kafka/src/test/org/apache/storm/kafka/TridentKafkaTopology.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.kafka;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.ImmutableMap;
-import org.apache.storm.kafka.trident.TridentKafkaStateFactory;
-import org.apache.storm.kafka.trident.TridentKafkaUpdater;
-import org.apache.storm.kafka.trident.mapper.FieldNameBasedTupleToKafkaMapper;
-import org.apache.storm.kafka.trident.selector.DefaultTopicSelector;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-import java.util.Properties;
-
-public class TridentKafkaTopology {
-
-    private static StormTopology buildTopology(String brokerConnectionString) {
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", "1"),
-                new Values("trident", "1"),
-                new Values("needs", "1"),
-                new Values("javadoc", "1")
-        );
-        spout.setCycle(true);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
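-        // minimal producer settings; acks=1 waits only for the partition leader to acknowledge each write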
-        Properties props = new Properties();
-        props.put("bootstrap.servers", brokerConnectionString);
-        props.put("acks", "1");
-        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
-        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
-
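-        // map the "word" field to the Kafka message key and "count" to the value, writing to topic "test"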
-        TridentKafkaStateFactory stateFactory = new TridentKafkaStateFactory()
-            .withProducerProperties(props)
-            .withKafkaTopicSelector(new DefaultTopicSelector("test"))
-            .withTridentTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper("word", "count"));
-        stream.partitionPersist(stateFactory, fields, new TridentKafkaUpdater(), new Fields());
-
-        return topology.build();
-    }
-
-    /**
-     * To run this topology, ensure a Kafka broker is running and pass its connection string as the only argument.
-     * Create a topic named "test" from the command line:
-     * kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
-     *
-     * Run this program, then start the Kafka console consumer:
-     * kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
-     *
-     * You should see the messages flowing through.
-     *
-     * @param args the Kafka broker connection string, e.g. localhost:9092
-     * @throws Exception
-     */
-    public static void main(String[] args) throws Exception {
-        if (args.length < 1) {
-            System.out.println("Please provide the Kafka broker url, e.g. localhost:9092");
-            System.exit(1);
-        }
-
-        LocalCluster cluster = new LocalCluster();
-        cluster.submitTopology("wordCounter", new Config(), buildTopology(args[0]));
-        Thread.sleep(60 * 1000);
-        cluster.killTopology("wordCounter");
-
-        cluster.shutdown();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/InsertWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/InsertWordCount.java b/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/InsertWordCount.java
deleted file mode 100644
index c83bdbd..0000000
--- a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/InsertWordCount.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mongodb.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.mongodb.bolt.MongoInsertBolt;
-import org.apache.storm.mongodb.common.mapper.MongoMapper;
-import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class InsertWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String COUNT_BOLT = "COUNT_BOLT";
-    private static final String INSERT_BOLT = "INSERT_BOLT";
-
-    private static final String TEST_MONGODB_URL = "mongodb://127.0.0.1:27017/test";
-    private static final String TEST_MONGODB_COLLECTION_NAME = "wordcount";
-    
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        String url = TEST_MONGODB_URL;
-        String collectionName = TEST_MONGODB_COLLECTION_NAME;
-
-        if (args.length >= 2) {
-            url = args[0];
-            collectionName = args[1];
-        }
-
-        WordSpout spout = new WordSpout();
-        WordCounter bolt = new WordCounter();
-
-        MongoMapper mapper = new SimpleMongoMapper()
-                .withFields("word", "count");
-        
-        MongoInsertBolt insertBolt = new MongoInsertBolt(url, collectionName, mapper);
-
-        // wordSpout ==> countBolt ==> MongoInsertBolt
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(INSERT_BOLT, insertBolt, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
-
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else{
-            System.out.println("Usage: InsertWordCount <mongodb url> <mongodb collection> [topology name]");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/UpdateWordCount.java b/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
deleted file mode 100644
index 071708e..0000000
--- a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mongodb.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.mongodb.bolt.MongoInsertBolt;
-import org.apache.storm.mongodb.bolt.MongoUpdateBolt;
-import org.apache.storm.mongodb.common.QueryFilterCreator;
-import org.apache.storm.mongodb.common.SimpleQueryFilterCreator;
-import org.apache.storm.mongodb.common.mapper.MongoMapper;
-import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
-import org.apache.storm.mongodb.common.mapper.SimpleMongoUpdateMapper;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class UpdateWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String COUNT_BOLT = "COUNT_BOLT";
-    private static final String UPDATE_BOLT = "UPDATE_BOLT";
-
-    private static final String TEST_MONGODB_URL = "mongodb://127.0.0.1:27017/test";
-    private static final String TEST_MONGODB_COLLECTION_NAME = "wordcount";
-    
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        String url = TEST_MONGODB_URL;
-        String collectionName = TEST_MONGODB_COLLECTION_NAME;
-
-        if (args.length >= 2) {
-            url = args[0];
-            collectionName = args[1];
-        }
-
-        WordSpout spout = new WordSpout();
-        WordCounter bolt = new WordCounter();
-
-        MongoMapper mapper = new SimpleMongoUpdateMapper()
-                .withFields("word", "count");
-
-        QueryFilterCreator updateQueryCreator = new SimpleQueryFilterCreator()
-                .withField("word");
-        
-        MongoUpdateBolt updateBolt = new MongoUpdateBolt(url, collectionName, updateQueryCreator, mapper);
-
-        // Uncomment to insert a new document when no existing document matches the query filter:
-        //updateBolt.withUpsert(true);
-
-        // wordSpout ==> countBolt ==> MongoUpdateBolt
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(UPDATE_BOLT, updateBolt, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
-
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else {
-            System.out.println("Usage: UpdateWordCount <mongodb url> <mongodb collection> [topology name]");
-        }
-    }
-}
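
The commented-out withUpsert(true) above controls whether a tuple whose
filter matches no existing document inserts a new one. The bolt's write
roughly corresponds to the following driver call, sketched here against
the current MongoDB Java driver API (values illustrative):

    import com.mongodb.client.MongoClients;
    import com.mongodb.client.MongoCollection;
    import com.mongodb.client.model.Filters;
    import com.mongodb.client.model.UpdateOptions;
    import com.mongodb.client.model.Updates;
    import org.bson.Document;

    public class UpsertSketch {
        public static void main(String[] args) {
            MongoCollection<Document> coll = MongoClients.create("mongodb://127.0.0.1:27017")
                    .getDatabase("test").getCollection("wordcount");
            // Filter shape from SimpleQueryFilterCreator.withField("word");
            // upsert(true) mirrors updateBolt.withUpsert(true).
            coll.updateOne(Filters.eq("word", "apple"),
                    Updates.set("count", "3"),
                    new UpdateOptions().upsert(true));
        }
    }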

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordCounter.java b/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordCounter.java
deleted file mode 100644
index 481f959..0000000
--- a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordCounter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mongodb.topology;
-
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.IBasicBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Maps;
-
-import java.util.Map;
-
-import static org.apache.storm.utils.Utils.tuple;
-
-public class WordCounter implements IBasicBolt {
-    private Map<String, Integer> wordCounter = Maps.newHashMap();
-
-    public void prepare(Map stormConf, TopologyContext context) {
-        
-    }
-
-    public void execute(Tuple input, BasicOutputCollector collector) {
-        String word = input.getStringByField("word");
-        int count;
-        if (wordCounter.containsKey(word)) {
-            count = wordCounter.get(word) + 1;
-        } else {
-            count = 1;
-        }
-
-        wordCounter.put(word, count);
-        collector.emit(new Values(word, String.valueOf(count)));
-    }
-
-    public void cleanup() {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word", "count"));
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-}
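
Incidentally, if the examples can assume Java 8, the containsKey branch in
execute() collapses to Map.merge, which stores and returns the updated
count in one call:

    int count = wordCounter.merge(word, 1, Integer::sum);

The emit line stays unchanged.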

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordSpout.java b/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordSpout.java
deleted file mode 100644
index 284f228..0000000
--- a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/topology/WordSpout.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mongodb.topology;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-
-public class WordSpout implements IRichSpout {
-    boolean isDistributed;
-    SpoutOutputCollector collector;
-    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
-
-    public WordSpout() {
-        this(true);
-    }
-
-    public WordSpout(boolean isDistributed) {
-        this.isDistributed = isDistributed;
-    }
-
-    public boolean isDistributed() {
-        return this.isDistributed;
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    public void close() {
-
-    }
-
-    public void nextTuple() {
-        final Random rand = new Random();
-        final String word = words[rand.nextInt(words.length)];
-        this.collector.emit(new Values(word), UUID.randomUUID());
-        Thread.yield();
-    }
-
-    public void ack(Object msgId) {
-
-    }
-
-    public void fail(Object msgId) {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word"));
-    }
-
-    @Override
-    public void activate() {
-    }
-
-    @Override
-    public void deactivate() {
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/trident/WordCountTrident.java
----------------------------------------------------------------------
diff --git a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/trident/WordCountTrident.java b/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/trident/WordCountTrident.java
deleted file mode 100644
index 7a18863..0000000
--- a/external/storm-mongodb/src/test/java/org/apache/storm/mongodb/trident/WordCountTrident.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mongodb.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.mongodb.common.mapper.MongoMapper;
-import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
-import org.apache.storm.mongodb.trident.state.MongoState;
-import org.apache.storm.mongodb.trident.state.MongoStateFactory;
-import org.apache.storm.mongodb.trident.state.MongoStateUpdater;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.state.StateFactory;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-public class WordCountTrident {
-
-    public static StormTopology buildTopology(String url, String collectionName){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        MongoMapper mapper = new SimpleMongoMapper()
-                .withFields("word", "count");
-
-        MongoState.Options options = new MongoState.Options()
-                .withUrl(url)
-                .withCollectionName(collectionName)
-                .withMapper(mapper);
-
-        StateFactory factory = new MongoStateFactory(options);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        stream.partitionPersist(factory, fields,  new MongoStateUpdater(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("wordCounter", conf, buildTopology(args[0], args[1]));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("wordCounter");
-            cluster.shutdown();
-            System.exit(0);
-        }
-        else if (args.length == 3) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0], args[1]));
-        } else {
-            System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
-        }
-    }
-
-}
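
As with the bolt-based MongoDB samples, two positional arguments run this
Trident topology on a LocalCluster and a third submits it to a cluster,
e.g. locally (jar name illustrative):

    storm jar storm-mongodb-examples.jar \
        org.apache.storm.mongodb.trident.WordCountTrident \
        mongodb://127.0.0.1:27017/test wordcount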

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/pom.xml
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/pom.xml b/external/storm-mqtt/examples/pom.xml
deleted file mode 100644
index 3b152ae..0000000
--- a/external/storm-mqtt/examples/pom.xml
+++ /dev/null
@@ -1,115 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <artifactId>storm-mqtt-examples</artifactId>
-  <packaging>jar</packaging>
-
-  <name>storm-mqtt-examples</name>
-
-  <parent>
-    <groupId>org.apache.storm</groupId>
-    <artifactId>storm-mqtt-parent</artifactId>
-    <version>1.1.0-SNAPSHOT</version>
-    <relativePath>../pom.xml</relativePath>
-  </parent>
-
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.storm</groupId>
-      <artifactId>storm-mqtt</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-core</artifactId>
-      <version>2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-slf4j-impl</artifactId>
-      <version>2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.storm</groupId>
-      <artifactId>storm-core</artifactId>
-      <version>${project.version}</version>
-      <scope>${provided.scope}</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.storm</groupId>
-      <artifactId>flux-core</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.fusesource.mqtt-client</groupId>
-      <artifactId>mqtt-client</artifactId>
-      <version>1.10</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.activemq</groupId>
-      <artifactId>activemq-broker</artifactId>
-      <version>5.9.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.activemq</groupId>
-      <artifactId>activemq-mqtt</artifactId>
-      <version>5.9.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.activemq</groupId>
-      <artifactId>activemq-kahadb-store</artifactId>
-      <version>5.9.0</version>
-    </dependency>
-  </dependencies>
-  <build>
-    <plugins>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>1.4</version>
-        <configuration>
-          <createDependencyReducedPom>true</createDependencyReducedPom>
-        </configuration>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                  <mainClass>org.apache.storm.flux.Flux</mainClass>
-                </transformer>
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/src/main/flux/sample.yaml
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/src/main/flux/sample.yaml b/external/storm-mqtt/examples/src/main/flux/sample.yaml
deleted file mode 100644
index c2902dc..0000000
--- a/external/storm-mqtt/examples/src/main/flux/sample.yaml
+++ /dev/null
@@ -1,62 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
----
-
-# topology definition
-# name to be used when submitting
-name: "mqtt-topology"
-
-components:
-   ########## MQTT Spout Config ############
-  - id: "mqtt-type"
-    className: "org.apache.storm.mqtt.examples.CustomMessageMapper"
-
-  - id: "mqtt-options"
-    className: "org.apache.storm.mqtt.common.MqttOptions"
-    properties:
-      - name: "url"
-        value: "tcp://localhost:1883"
-      - name: "topics"
-        value:
-          - "/users/tgoetz/#"
-
-# topology configuration
-config:
-  topology.workers: 1
-  topology.max.spout.pending: 1000
-
-# spout definitions
-spouts:
-  - id: "mqtt-spout"
-    className: "org.apache.storm.mqtt.spout.MqttSpout"
-    constructorArgs:
-      - ref: "mqtt-type"
-      - ref: "mqtt-options"
-    parallelism: 1
-
-# bolt definitions
-bolts:
-  - id: "log"
-    className: "org.apache.storm.flux.wrappers.bolts.LogInfoBolt"
-    parallelism: 1
-
-
-streams:
-  - from: "mqtt-spout"
-    to: "log"
-    grouping:
-      type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/src/main/flux/ssl-sample.yaml
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/src/main/flux/ssl-sample.yaml b/external/storm-mqtt/examples/src/main/flux/ssl-sample.yaml
deleted file mode 100644
index bfb668d..0000000
--- a/external/storm-mqtt/examples/src/main/flux/ssl-sample.yaml
+++ /dev/null
@@ -1,78 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
----
-
-# topology definition
-# name to be used when submitting
-name: "mqtt-topology"
-
-components:
-   ########## MQTT Spout Config ############
-  - id: "mqtt-type"
-    className: "org.apache.storm.mqtt.examples.CustomMessageMapper"
-
-  - id: "keystore-loader"
-    className: "org.apache.storm.mqtt.ssl.DefaultKeyStoreLoader"
-    constructorArgs:
-      - "keystore.jks"
-      - "truststore.jks"
-    properties:
-      - name: "keyPassword"
-        value: "password"
-      - name: "keyStorePassword"
-        value: "password"
-      - name: "trustStorePassword"
-        value: "password"
-
-  - id: "mqtt-options"
-    className: "org.apache.storm.mqtt.common.MqttOptions"
-    properties:
-      - name: "url"
-        value: "ssl://raspberrypi.local:8883"
-      - name: "topics"
-        value:
-          - "/users/tgoetz/#"
-
-# topology configuration
-config:
-  topology.workers: 1
-  topology.max.spout.pending: 1000
-
-# spout definitions
-spouts:
-  - id: "mqtt-spout"
-    className: "org.apache.storm.mqtt.spout.MqttSpout"
-    constructorArgs:
-      - ref: "mqtt-type"
-      - ref: "mqtt-options"
-      - ref: "keystore-loader"
-    parallelism: 1
-
-# bolt definitions
-bolts:
-
-  - id: "log"
-    className: "org.apache.storm.flux.wrappers.bolts.LogInfoBolt"
-    parallelism: 1
-
-
-streams:
-
-  - from: "mqtt-spout"
-    to: "log"
-    grouping:
-      type: SHUFFLE


[05/10] storm git commit: STORM-1970: external project examples refactor

Posted by ka...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/src/main/flux/sample.yaml
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/src/main/flux/sample.yaml b/examples/storm-mqtt-examples/src/main/flux/sample.yaml
new file mode 100644
index 0000000..c2902dc
--- /dev/null
+++ b/examples/storm-mqtt-examples/src/main/flux/sample.yaml
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+---
+
+# topology definition
+# name to be used when submitting
+name: "mqtt-topology"
+
+components:
+   ########## MQTT Spout Config ############
+  - id: "mqtt-type"
+    className: "org.apache.storm.mqtt.examples.CustomMessageMapper"
+
+  - id: "mqtt-options"
+    className: "org.apache.storm.mqtt.common.MqttOptions"
+    properties:
+      - name: "url"
+        value: "tcp://localhost:1883"
+      - name: "topics"
+        value:
+          - "/users/tgoetz/#"
+
+# topology configuration
+config:
+  topology.workers: 1
+  topology.max.spout.pending: 1000
+
+# spout definitions
+spouts:
+  - id: "mqtt-spout"
+    className: "org.apache.storm.mqtt.spout.MqttSpout"
+    constructorArgs:
+      - ref: "mqtt-type"
+      - ref: "mqtt-options"
+    parallelism: 1
+
+# bolt definitions
+bolts:
+  - id: "log"
+    className: "org.apache.storm.flux.wrappers.bolts.LogInfoBolt"
+    parallelism: 1
+
+
+streams:
+  - from: "mqtt-spout"
+    to: "log"
+    grouping:
+      type: SHUFFLE
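
Assuming the examples jar is shaded with org.apache.storm.flux.Flux as its
main class (as in the shade-plugin configuration shown earlier), a
definition like this one can be run locally through the Flux CLI, e.g.
(jar name illustrative):

    storm jar storm-mqtt-examples.jar org.apache.storm.flux.Flux --local sample.yaml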

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/src/main/flux/ssl-sample.yaml
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/src/main/flux/ssl-sample.yaml b/examples/storm-mqtt-examples/src/main/flux/ssl-sample.yaml
new file mode 100644
index 0000000..bfb668d
--- /dev/null
+++ b/examples/storm-mqtt-examples/src/main/flux/ssl-sample.yaml
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+---
+
+# topology definition
+# name to be used when submitting
+name: "mqtt-topology"
+
+components:
+   ########## MQTT Spout Config ############
+  - id: "mqtt-type"
+    className: "org.apache.storm.mqtt.examples.CustomMessageMapper"
+
+  - id: "keystore-loader"
+    className: "org.apache.storm.mqtt.ssl.DefaultKeyStoreLoader"
+    constructorArgs:
+      - "keystore.jks"
+      - "truststore.jks"
+    properties:
+      - name: "keyPassword"
+        value: "password"
+      - name: "keyStorePassword"
+        value: "password"
+      - name: "trustStorePassword"
+        value: "password"
+
+  - id: "mqtt-options"
+    className: "org.apache.storm.mqtt.common.MqttOptions"
+    properties:
+      - name: "url"
+        value: "ssl://raspberrypi.local:8883"
+      - name: "topics"
+        value:
+          - "/users/tgoetz/#"
+
+# topology configuration
+config:
+  topology.workers: 1
+  topology.max.spout.pending: 1000
+
+# spout definitions
+spouts:
+  - id: "mqtt-spout"
+    className: "org.apache.storm.mqtt.spout.MqttSpout"
+    constructorArgs:
+      - ref: "mqtt-type"
+      - ref: "mqtt-options"
+      - ref: "keystore-loader"
+    parallelism: 1
+
+# bolt definitions
+bolts:
+
+  - id: "log"
+    className: "org.apache.storm.flux.wrappers.bolts.LogInfoBolt"
+    parallelism: 1
+
+
+streams:
+
+  - from: "mqtt-spout"
+    to: "log"
+    grouping:
+      type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java b/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
new file mode 100644
index 0000000..ec5645c
--- /dev/null
+++ b/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mqtt.examples;
+
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.mqtt.MqttMessage;
+import org.apache.storm.mqtt.MqttMessageMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Given a topic name of the form "/users/{user}/{location}/{deviceId}"
+ * (note the leading slash, which the index arithmetic below relies on)
+ * and a payload of "{temperature}/{humidity}", emits a tuple containing
+ * user(String), deviceId(String), location(String), temperature(float)
+ * and humidity(float).
+ */
+public class CustomMessageMapper implements MqttMessageMapper {
+    private static final Logger LOG = LoggerFactory.getLogger(CustomMessageMapper.class);
+
+
+    public Values toValues(MqttMessage message) {
+        String topic = message.getTopic();
+        String[] topicElements = topic.split("/");
+        String[] payloadElements = new String(message.getMessage()).split("/");
+
+        return new Values(topicElements[2], topicElements[4], topicElements[3], Float.parseFloat(payloadElements[0]),
+                Float.parseFloat(payloadElements[1]));
+    }
+
+    public Fields outputFields() {
+        return new Fields("user", "deviceId", "location", "temperature", "humidity");
+    }
+}
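
To make the index arithmetic in toValues() concrete: the topics this
example publishes begin with a slash, so split("/") yields an empty first
element. A small self-contained sketch (values illustrative):

    public class MapperDemo {
        public static void main(String[] args) {
            String[] t = "/users/tgoetz/office/1234".split("/");
            // t = ["", "users", "tgoetz", "office", "1234"]
            String[] p = "72/35".split("/");
            System.out.printf("user=%s location=%s deviceId=%s temp=%s hum=%s%n",
                    t[2], t[3], t[4],
                    Float.parseFloat(p[0]), Float.parseFloat(p[1]));
        }
    }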

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java b/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
new file mode 100644
index 0000000..fa8389d
--- /dev/null
+++ b/examples/storm-mqtt-examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mqtt.examples;
+
+
+import org.apache.activemq.broker.BrokerService;
+import org.apache.storm.mqtt.MqttLogger;
+import org.fusesource.mqtt.client.BlockingConnection;
+import org.fusesource.mqtt.client.MQTT;
+import org.fusesource.mqtt.client.QoS;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Random;
+
+public class MqttBrokerPublisher {
+    private static final Logger LOG = LoggerFactory.getLogger(MqttBrokerPublisher.class);
+
+    private static BrokerService broker;
+
+    private static BlockingConnection connection;
+
+
+    public static void startBroker() throws Exception {
+        LOG.info("Starting broker...");
+        broker = new BrokerService();
+        broker.addConnector("mqtt://localhost:1883");
+        broker.setDataDirectory("target");
+        broker.start();
+        LOG.info("MQTT broker started");
+        Runtime.getRuntime().addShutdownHook(new Thread(){
+            @Override
+            public void run() {
+                try {
+                    LOG.info("Shutting down MQTT broker...");
+                    broker.stop();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+    }
+
+    public static void startPublisher() throws Exception {
+        MQTT client = new MQTT();
+        client.setTracer(new MqttLogger());
+        client.setHost("tcp://localhost:1883");
+        client.setClientId("MqttBrokerPublisher");
+        connection = client.blockingConnection();
+
+        Runtime.getRuntime().addShutdownHook(new Thread(){
+            @Override
+            public void run() {
+                try {
+                    LOG.info("Shutting down MQTT client...");
+                    connection.disconnect();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+
+        connection.connect();
+    }
+
+    public static void publish() throws Exception {
+        String topic = "/users/tgoetz/office/1234";
+        Random rand = new Random();
+        LOG.info("Publishing to topic {}", topic);
+        LOG.info("Cntrl+C to exit.");
+
+        while(true) {
+            int temp = rand.nextInt(100);
+            int hum = rand.nextInt(100);
+            String payload = temp + "/" + hum;
+
+            connection.publish(topic, payload.getBytes(), QoS.AT_LEAST_ONCE, false);
+            Thread.sleep(500);
+        }
+    }
+
+    public static void main(String[] args) throws Exception{
+        startBroker();
+        startPublisher();
+        publish();
+    }
+}
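
A quick way to verify the embedded broker and publisher outside of Storm
is a blocking subscriber built with the same fusesource mqtt-client; a
sketch:

    import org.fusesource.mqtt.client.BlockingConnection;
    import org.fusesource.mqtt.client.MQTT;
    import org.fusesource.mqtt.client.Message;
    import org.fusesource.mqtt.client.QoS;
    import org.fusesource.mqtt.client.Topic;

    public class MqttSubscribeCheck {
        public static void main(String[] args) throws Exception {
            MQTT client = new MQTT();
            client.setHost("tcp://localhost:1883");
            BlockingConnection conn = client.blockingConnection();
            conn.connect();
            conn.subscribe(new Topic[]{ new Topic("/users/tgoetz/#", QoS.AT_LEAST_ONCE) });
            Message msg = conn.receive();   // blocks until MqttBrokerPublisher sends
            System.out.println(msg.getTopic() + " -> " + new String(msg.getPayload()));
            msg.ack();                      // required for AT_LEAST_ONCE delivery
            conn.disconnect();
        }
    }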

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/src/main/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/src/main/resources/log4j2.xml b/examples/storm-mqtt-examples/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..bfe57a1
--- /dev/null
+++ b/examples/storm-mqtt-examples/src/main/resources/log4j2.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<Configuration status="WARN">
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+        </Console>
+    </Appenders>
+
+    <Loggers>
+        <Logger name="org.apache.storm.flux.wrappers" level="INFO"/>
+        <Logger name="org.apache.storm.mqtt" level="DEBUG"/>
+        <Root level="error">
+            <AppenderRef ref="Console"/>
+        </Root>
+    </Loggers>
+</Configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-opentsdb-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/pom.xml b/examples/storm-opentsdb-examples/pom.xml
new file mode 100644
index 0000000..057efae
--- /dev/null
+++ b/examples/storm-opentsdb-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-opentsdb-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-opentsdb</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
new file mode 100644
index 0000000..b0580f6
--- /dev/null
+++ b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.storm.opentsdb;
+
+import com.google.common.collect.Lists;
+import org.apache.storm.Config;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.spout.IBatchSpout;
+import org.apache.storm.tuple.Fields;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+/**
+ * BatchSpout implementation for metrics generation.
+ */
+public class MetricGenBatchSpout implements IBatchSpout {
+
+    private int batchSize;
+    private final Map<Long, List<List<Object>>> batches = new HashMap<>();
+
+    public MetricGenBatchSpout(int batchSize) {
+        this.batchSize = batchSize;
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context) {
+
+    }
+
+    @Override
+    public void emitBatch(long batchId, TridentCollector collector) {
+        List<List<Object>> values;
+        if(batches.containsKey(batchId)) {
+            values = batches.get(batchId);
+        } else {
+            values = new ArrayList<>();
+            for (int i = 0; i < batchSize; i++) {
+                // tuple values are mapped with
+                // metric, timestamp, value, Map of tagK/tagV respectively.
+                values.add(Lists.newArrayList(Lists.newArrayList("device.temp", System.currentTimeMillis(), new Random().nextLong(),
+                        Collections.singletonMap("loc.id", new Random().nextInt() % 64 + ""))));
+            }
+            batches.put(batchId, values);
+        }
+        for (List<Object> value : values) {
+            collector.emit(value);
+        }
+
+    }
+
+    @Override
+    public void ack(long batchId) {
+        batches.remove(batchId);
+    }
+
+    @Override
+    public void close() {
+
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        Config conf = new Config();
+        conf.setMaxTaskParallelism(1);
+        return conf;
+    }
+
+    @Override
+    public Fields getOutputFields() {
+        return MetricGenSpout.DEFAULT_METRIC_FIELDS;
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenSpout.java b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenSpout.java
new file mode 100644
index 0000000..21af196
--- /dev/null
+++ b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/MetricGenSpout.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.storm.opentsdb;
+
+import com.google.common.collect.Lists;
+import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Random;
+
+/**
+ * Spout to generate tuples containing metric data.
+ */
+public class MetricGenSpout extends BaseRichSpout {
+
+    public static final Fields DEFAULT_METRIC_FIELDS =
+            new Fields(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getMetricField(),
+                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getTimestampField(),
+                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getValueField(),
+                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getTagsField());
+
+    private Map conf;
+    private TopologyContext context;
+    private SpoutOutputCollector collector;
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(DEFAULT_METRIC_FIELDS);
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.conf = conf;
+        this.context = context;
+        this.collector = collector;
+    }
+
+    @Override
+    public void nextTuple() {
+        try {
+            Thread.sleep(100);
+        } catch (InterruptedException e) {
+            // ignore
+        }
+        // tuple values are mapped with
+        // metric, timestamp, value, Map of tagK/tagV respectively.
+        collector.emit(Lists.newArrayList("device.temp", System.currentTimeMillis(), new Random().nextLong(),
+                Collections.singletonMap("loc.id", new Random().nextInt() % 64 + "")));
+    }
+}
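
For reference, the four fields declared here map one-to-one onto an
OpenTSDB datapoint; a single emitted tuple corresponds to an HTTP
/api/put body along these lines (values illustrative):

    {
        "metric": "device.temp",
        "timestamp": 1473652566000,
        "value": 1234,
        "tags": { "loc.id": "17" }
    }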

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
new file mode 100644
index 0000000..6c511b8
--- /dev/null
+++ b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.storm.opentsdb;
+
+import org.apache.storm.opentsdb.bolt.OpenTsdbBolt;
+import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
+import org.apache.storm.opentsdb.client.OpenTsdbClient;
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+
+import java.util.Collections;
+
+/**
+ * Sample application to use OpenTSDB bolt.
+ */
+public class SampleOpenTsdbBoltTopology {
+
+    public static void main(String[] args) throws Exception {
+        if(args.length == 0) {
+            throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbBoltTopology <tsdb-url>`");
+        }
+
+        TopologyBuilder topologyBuilder = new TopologyBuilder();
+
+        topologyBuilder.setSpout("metric-gen", new MetricGenSpout(), 5);
+
+        String openTsdbUrl = args[0];
+        OpenTsdbClient.Builder builder =  OpenTsdbClient.newBuilder(openTsdbUrl).sync(30_000).returnDetails();
+        final OpenTsdbBolt openTsdbBolt = new OpenTsdbBolt(builder, Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
+        openTsdbBolt.withBatchSize(10).withFlushInterval(2).failTupleForFailedMetrics();
+        topologyBuilder.setBolt("opentsdb", openTsdbBolt).shuffleGrouping("metric-gen");
+
+        Config conf = new Config();
+        conf.setDebug(true);
+
+        if (args.length > 1) {
+            conf.setNumWorkers(3);
+
+            StormSubmitter.submitTopologyWithProgressBar(args[1], conf, topologyBuilder.createTopology());
+        } else {
+            conf.setMaxTaskParallelism(3);
+
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("word-count", conf, topologyBuilder.createTopology());
+
+            Thread.sleep(30000);
+
+            cluster.shutdown();
+            System.exit(0);
+        }
+    }
+}
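
Run locally with just the TSDB endpoint, or append a topology name to
submit to a cluster (jar name illustrative; 4242 is OpenTSDB's default
HTTP port):

    storm jar storm-opentsdb-examples.jar \
        org.apache.storm.opentsdb.SampleOpenTsdbBoltTopology http://localhost:4242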

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
new file mode 100644
index 0000000..db51a8a
--- /dev/null
+++ b/examples/storm-opentsdb-examples/src/main/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.storm.opentsdb;
+
+import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
+import org.apache.storm.opentsdb.client.OpenTsdbClient;
+import org.apache.storm.opentsdb.trident.OpenTsdbStateFactory;
+import org.apache.storm.opentsdb.trident.OpenTsdbStateUpdater;
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.operation.Consumer;
+import org.apache.storm.trident.tuple.TridentTuple;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+
+/**
+ * Sample trident topology to store time series metrics in to OpenTsdb.
+ */
+public class SampleOpenTsdbTridentTopology {
+    private static final Logger LOG = LoggerFactory.getLogger(SampleOpenTsdbTridentTopology.class);
+
+    public static void main(String[] args) throws Exception {
+        if(args.length == 0) {
+            throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbTridentTopology <tsdb-url>`");
+        }
+
+        String tsdbUrl = args[0];
+
+
+        final OpenTsdbClient.Builder openTsdbClientBuilder = OpenTsdbClient.newBuilder(tsdbUrl);
+        final OpenTsdbStateFactory openTsdbStateFactory =
+                new OpenTsdbStateFactory(openTsdbClientBuilder,
+                        Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
+
+        TridentTopology tridentTopology = new TridentTopology();
+        final Stream stream = tridentTopology.newStream("metric-tsdb-stream", new MetricGenBatchSpout(10));
+
+        stream.peek(new Consumer() {
+            @Override
+            public void accept(TridentTuple input) {
+                LOG.info("########### Received tuple: [{}]", input);
+            }
+        }).partitionPersist(openTsdbStateFactory, MetricGenSpout.DEFAULT_METRIC_FIELDS, new OpenTsdbStateUpdater());
+
+
+        Config conf = new Config();
+        conf.setDebug(true);
+
+        if (args.length > 1) {
+            conf.setNumWorkers(3);
+
+            StormSubmitter.submitTopologyWithProgressBar(args[1], conf, tridentTopology.build());
+        } else {
+            conf.setMaxTaskParallelism(3);
+
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("word-count", conf, tridentTopology.build());
+
+            Thread.sleep(30000);
+
+            cluster.shutdown();
+            System.exit(0);
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/pom.xml b/examples/storm-redis-examples/pom.xml
new file mode 100644
index 0000000..91a8660
--- /dev/null
+++ b/examples/storm-redis-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-redis-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-redis</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/LookupWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/LookupWordCount.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/LookupWordCount.java
new file mode 100644
index 0000000..f62b7b0
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/LookupWordCount.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.task.OutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Lists;
+import org.apache.storm.redis.bolt.RedisLookupBolt;
+import org.apache.storm.redis.common.config.JedisPoolConfig;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.RedisLookupMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+public class LookupWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
+    private static final String PRINT_BOLT = "PRINT_BOLT";
+
+    private static final String TEST_REDIS_HOST = "127.0.0.1";
+    private static final int TEST_REDIS_PORT = 6379;
+
+    public static class PrintWordTotalCountBolt extends BaseRichBolt {
+        private static final Logger LOG = LoggerFactory.getLogger(PrintWordTotalCountBolt.class);
+        private static final Random RANDOM = new Random();
+        private OutputCollector collector;
+
+        @Override
+        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+            this.collector = collector;
+        }
+
+        @Override
+        public void execute(Tuple input) {
+            String wordName = input.getStringByField("wordName");
+            String countStr = input.getStringByField("count");
+
+            // log only a small sample (~0.4%) of lookup results so the output stays readable
+            if (RANDOM.nextInt(1000) > 995) {
+                int count = 0;
+                if (countStr != null) {
+                    count = Integer.parseInt(countStr);
+                }
+                LOG.info("Lookup result - word : " + wordName + " / count : " + count);
+            }
+
+            collector.ack(input);
+        }
+
+        @Override
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        String host = TEST_REDIS_HOST;
+        int port = TEST_REDIS_PORT;
+
+        if (args.length >= 2) {
+            host = args[0];
+            port = Integer.parseInt(args[1]);
+        }
+
+        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
+                .setHost(host).setPort(port).build();
+
+        WordSpout spout = new WordSpout();
+        RedisLookupMapper lookupMapper = setupLookupMapper();
+        RedisLookupBolt lookupBolt = new RedisLookupBolt(poolConfig, lookupMapper);
+
+        PrintWordTotalCountBolt printBolt = new PrintWordTotalCountBolt();
+
+        //wordspout -> lookupbolt
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(LOOKUP_BOLT, lookupBolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(PRINT_BOLT, printBolt, 1).shuffleGrouping(LOOKUP_BOLT);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: LookupWordCount <redis host> <redis port> [topology name]");
+        }
+    }
+
+    private static RedisLookupMapper setupLookupMapper() {
+        return new WordCountRedisLookupMapper();
+    }
+
+    private static class WordCountRedisLookupMapper implements RedisLookupMapper {
+        private RedisDataTypeDescription description;
+        private final String hashKey = "wordCount";
+
+        public WordCountRedisLookupMapper() {
+            description = new RedisDataTypeDescription(
+                    RedisDataTypeDescription.RedisDataType.HASH, hashKey);
+        }
+
+        @Override
+        public List<Values> toTuple(ITuple input, Object value) {
+            String member = getKeyFromTuple(input);
+            List<Values> values = Lists.newArrayList();
+            values.add(new Values(member, value));
+            return values;
+        }
+
+        @Override
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("wordName", "count"));
+        }
+
+        @Override
+        public RedisDataTypeDescription getDataTypeDescription() {
+            return description;
+        }
+
+        @Override
+        public String getKeyFromTuple(ITuple tuple) {
+            return tuple.getStringByField("word");
+        }
+
+        @Override
+        public String getValueFromTuple(ITuple tuple) {
+            return null;
+        }
+    }
+}
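
A quick way to sanity-check the lookup path: with the HASH data type description,
the RedisLookupBolt above should resolve each word to HGET wordCount <word>. Below
is a minimal standalone sketch using the bare Jedis client (assuming a local Redis
on 127.0.0.1:6379, matching the defaults above; the class name is illustrative):

    import redis.clients.jedis.Jedis;

    public class WordCountHashCheck {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // the bolt's HASH descriptor corresponds to: HGET wordCount <word>
                System.out.println(jedis.hget("wordCount", "apple"));
            }
        }
    }

Lookups only return counts once something has written to the same hash, e.g. a
prior run of the PersistentWordCount topology below, which stores under the same
hashKey "wordCount".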

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/PersistentWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/PersistentWordCount.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/PersistentWordCount.java
new file mode 100644
index 0000000..d46bab6
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/PersistentWordCount.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.redis.bolt.RedisStoreBolt;
+import org.apache.storm.redis.common.config.JedisPoolConfig;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.RedisStoreMapper;
+
+public class PersistentWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String COUNT_BOLT = "COUNT_BOLT";
+    private static final String STORE_BOLT = "STORE_BOLT";
+
+    private static final String TEST_REDIS_HOST = "127.0.0.1";
+    private static final int TEST_REDIS_PORT = 6379;
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        String host = TEST_REDIS_HOST;
+        int port = TEST_REDIS_PORT;
+
+        if (args.length >= 2) {
+            host = args[0];
+            port = Integer.parseInt(args[1]);
+        }
+
+        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
+                .setHost(host).setPort(port).build();
+
+        WordSpout spout = new WordSpout();
+        WordCounter bolt = new WordCounter();
+        RedisStoreMapper storeMapper = setupStoreMapper();
+        RedisStoreBolt storeBolt = new RedisStoreBolt(poolConfig, storeMapper);
+
+        // wordSpout ==> countBolt ==> RedisBolt
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(COUNT_BOLT, bolt, 1).fieldsGrouping(WORD_SPOUT, new Fields("word"));
+        builder.setBolt(STORE_BOLT, storeBolt, 1).shuffleGrouping(COUNT_BOLT);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: PersistentWordCount <redis host> <redis port> (topology name)");
+        }
+    }
+
+    private static RedisStoreMapper setupStoreMapper() {
+        return new WordCountStoreMapper();
+    }
+
+    private static class WordCountStoreMapper implements RedisStoreMapper {
+        private RedisDataTypeDescription description;
+        private final String hashKey = "wordCount";
+
+        public WordCountStoreMapper() {
+            description = new RedisDataTypeDescription(
+                    RedisDataTypeDescription.RedisDataType.HASH, hashKey);
+        }
+
+        @Override
+        public RedisDataTypeDescription getDataTypeDescription() {
+            return description;
+        }
+
+        @Override
+        public String getKeyFromTuple(ITuple tuple) {
+            return tuple.getStringByField("word");
+        }
+
+        @Override
+        public String getValueFromTuple(ITuple tuple) {
+            return tuple.getStringByField("count");
+        }
+    }
+}
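
The store path is the inverse of the lookup above: with the HASH descriptor the
RedisStoreBolt should issue HSET wordCount <word> <count> for each tuple, so the
running totals can be inspected out-of-band. A minimal sketch (same local Redis
assumption as above; class name illustrative):

    import redis.clients.jedis.Jedis;

    public class WordCountHashDump {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // print every <word, count> field the store bolt has written so far
                jedis.hgetAll("wordCount").forEach((word, count) ->
                        System.out.println(word + " -> " + count));
            }
        }
    }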

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WhitelistWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WhitelistWordCount.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WhitelistWordCount.java
new file mode 100644
index 0000000..bcb2e0b
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WhitelistWordCount.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.redis.bolt.RedisFilterBolt;
+import org.apache.storm.redis.common.config.JedisPoolConfig;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.RedisFilterMapper;
+import org.apache.storm.task.OutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.tuple.Tuple;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.Random;
+
+public class WhitelistWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String WHITELIST_BOLT = "WHITELIST_BOLT";
+    private static final String COUNT_BOLT = "COUNT_BOLT";
+    private static final String PRINT_BOLT = "PRINT_BOLT";
+
+    private static final String TEST_REDIS_HOST = "127.0.0.1";
+    private static final int TEST_REDIS_PORT = 6379;
+
+    public static class PrintWordTotalCountBolt extends BaseRichBolt {
+        private static final Logger LOG = LoggerFactory.getLogger(PrintWordTotalCountBolt.class);
+        private static final Random RANDOM = new Random();
+        private OutputCollector collector;
+
+        @Override
+        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+            this.collector = collector;
+        }
+
+        @Override
+        public void execute(Tuple input) {
+            String wordName = input.getStringByField("word");
+            String countStr = input.getStringByField("count");
+
+            // log only a small sample (~0.4%) of count results so the output stays readable
+            if (RANDOM.nextInt(1000) > 995) {
+                int count = 0;
+                if (countStr != null) {
+                    count = Integer.parseInt(countStr);
+                }
+                LOG.info("Count result - word : " + wordName + " / count : " + count);
+            }
+
+            collector.ack(input);
+        }
+
+        @Override
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        String host = TEST_REDIS_HOST;
+        int port = TEST_REDIS_PORT;
+
+        if (args.length >= 2) {
+            host = args[0];
+            port = Integer.parseInt(args[1]);
+        }
+
+        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
+                .setHost(host).setPort(port).build();
+
+        WordSpout spout = new WordSpout();
+        RedisFilterMapper filterMapper = setupWhitelistMapper();
+        RedisFilterBolt whitelistBolt = new RedisFilterBolt(poolConfig, filterMapper);
+        WordCounter wordCounterBolt = new WordCounter();
+        PrintWordTotalCountBolt printBolt = new PrintWordTotalCountBolt();
+
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(WHITELIST_BOLT, whitelistBolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(COUNT_BOLT, wordCounterBolt, 1).fieldsGrouping(WHITELIST_BOLT, new Fields("word"));
+        builder.setBolt(PRINT_BOLT, printBolt, 1).shuffleGrouping(COUNT_BOLT);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: WhitelistWordCount <redis host> <redis port> [topology name]");
+        }
+    }
+
+    private static RedisFilterMapper setupWhitelistMapper() {
+        return new WhitelistWordFilterMapper();
+    }
+
+    private static class WhitelistWordFilterMapper implements RedisFilterMapper {
+        private RedisDataTypeDescription description;
+        private final String setKey = "whitelist";
+
+        public WhitelistWordFilterMapper() {
+            description = new RedisDataTypeDescription(
+                    RedisDataTypeDescription.RedisDataType.SET, setKey);
+        }
+
+        @Override
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("word"));
+        }
+
+        @Override
+        public RedisDataTypeDescription getDataTypeDescription() {
+            return description;
+        }
+
+        @Override
+        public String getKeyFromTuple(ITuple tuple) {
+            return tuple.getStringByField("word");
+        }
+
+        @Override
+        public String getValueFromTuple(ITuple tuple) {
+            return null;
+        }
+    }
+}
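
Note that the RedisFilterBolt only passes words that are already members of the
Redis SET "whitelist" (presumably via SISMEMBER), so the set must be seeded before
this topology produces any counts. A minimal seeding sketch (assuming a local Redis
on 127.0.0.1:6379; the seed words are illustrative):

    import redis.clients.jedis.Jedis;

    public class WhitelistSeeder {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // only these words will survive the WHITELIST_BOLT filter
                jedis.sadd("whitelist", "apple", "orange");
            }
        }
    }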

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordCounter.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordCounter.java
new file mode 100644
index 0000000..6fa930c
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordCounter.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.topology;
+
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.IBasicBolt;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Maps;
+
+import java.util.Map;
+
+public class WordCounter implements IBasicBolt {
+    private Map<String, Integer> wordCounter = Maps.newHashMap();
+
+    @SuppressWarnings("rawtypes")
+    public void prepare(Map stormConf, TopologyContext context) {
+    }
+
+    public void execute(Tuple input, BasicOutputCollector collector) {
+        String word = input.getStringByField("word");
+        // start at 1 on first sight, otherwise bump the running total once
+        int count = wordCounter.containsKey(word) ? wordCounter.get(word) + 1 : 1;
+        wordCounter.put(word, count);
+        collector.emit(new Values(word, String.valueOf(count)));
+    }
+
+    public void cleanup() {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word", "count"));
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordSpout.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordSpout.java
new file mode 100644
index 0000000..e2cdfde
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/topology/WordSpout.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.topology;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.IRichSpout;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+
+public class WordSpout implements IRichSpout {
+    boolean isDistributed;
+    SpoutOutputCollector collector;
+    // one shared Random instead of a fresh instance per emitted tuple
+    private final Random rand = new Random();
+    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
+
+    public WordSpout() {
+        this(true);
+    }
+
+    public WordSpout(boolean isDistributed) {
+        this.isDistributed = isDistributed;
+    }
+
+    public boolean isDistributed() {
+        return this.isDistributed;
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    public void close() {
+
+    }
+
+    public void nextTuple() {
+        final String word = words[rand.nextInt(words.length)];
+        this.collector.emit(new Values(word), UUID.randomUUID());
+        Thread.yield();
+    }
+
+    public void ack(Object msgId) {
+
+    }
+
+    public void fail(Object msgId) {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word"));
+    }
+
+    @Override
+    public void activate() {
+    }
+
+    @Override
+    public void deactivate() {
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/PrintFunction.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/PrintFunction.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/PrintFunction.java
new file mode 100644
index 0000000..37d3936
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/PrintFunction.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.storm.trident.operation.BaseFunction;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.tuple.TridentTuple;
+
+import java.util.Random;
+
+public class PrintFunction extends BaseFunction {
+
+    private static final Logger LOG = LoggerFactory.getLogger(PrintFunction.class);
+
+    private static final Random RANDOM = new Random();
+
+    @Override
+    public void execute(TridentTuple tuple, TridentCollector tridentCollector) {
+        if (RANDOM.nextInt(1000) > 995) {
+            LOG.info(tuple.toString());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountLookupMapper.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
new file mode 100644
index 0000000..a6ca8c9
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.RedisLookupMapper;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class WordCountLookupMapper implements RedisLookupMapper {
+    @Override
+    public List<Values> toTuple(ITuple input, Object value) {
+        List<Values> values = new ArrayList<Values>();
+        values.add(new Values(getKeyFromTuple(input), value));
+        return values;
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word", "value"));
+    }
+
+    @Override
+    public RedisDataTypeDescription getDataTypeDescription() {
+        return new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
+    }
+
+    @Override
+    public String getKeyFromTuple(ITuple tuple) {
+        return "test_" + tuple.getString(0);
+    }
+
+    @Override
+    public String getValueFromTuple(ITuple tuple) {
+        return tuple.getInteger(1).toString();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountStoreMapper.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
new file mode 100644
index 0000000..58df150
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.tuple.ITuple;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.RedisStoreMapper;
+
+public class WordCountStoreMapper implements RedisStoreMapper {
+    @Override
+    public RedisDataTypeDescription getDataTypeDescription() {
+        return new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
+    }
+
+    @Override
+    public String getKeyFromTuple(ITuple tuple) {
+        return "test_" + tuple.getString(0);
+    }
+
+    @Override
+    public String getValueFromTuple(ITuple tuple) {
+        return tuple.getInteger(1).toString();
+    }
+}
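
Both Trident mappers agree on the hash name "test" and the key prefix "test_", so
a word counted via the store path can be read back directly. A minimal check
(assuming a local Redis on 127.0.0.1:6379 and a prior run of the
WordCountTridentRedis or WordCountTridentRedisCluster topology below; class name
illustrative):

    import redis.clients.jedis.Jedis;

    public class TridentHashCheck {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // WordCountStoreMapper stores under: HSET test test_<word> <count>
                System.out.println(jedis.hget("test", "test_storm"));
            }
        }
    }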

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
new file mode 100644
index 0000000..e3eb0f9
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.redis.common.mapper.RedisLookupMapper;
+import org.apache.storm.redis.common.mapper.RedisStoreMapper;
+import org.apache.storm.redis.trident.state.RedisState;
+import org.apache.storm.redis.trident.state.RedisStateQuerier;
+import org.apache.storm.redis.trident.state.RedisStateUpdater;
+import org.apache.storm.redis.common.config.JedisPoolConfig;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+public class WordCountTridentRedis {
+    public static StormTopology buildTopology(String redisHost, Integer redisPort){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
+                                        .setHost(redisHost).setPort(redisPort)
+                                        .build();
+
+        RedisStoreMapper storeMapper = new WordCountStoreMapper();
+        RedisLookupMapper lookupMapper = new WordCountLookupMapper();
+        RedisState.Factory factory = new RedisState.Factory(poolConfig);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        stream.partitionPersist(factory,
+                                fields,
+                                new RedisStateUpdater(storeMapper).withExpire(86400000),
+                                new Fields());
+
+        TridentState state = topology.newStaticState(factory);
+        stream = stream.stateQuery(state, new Fields("word"),
+                                new RedisStateQuerier(lookupMapper),
+                                new Fields("columnName","columnValue"));
+        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        if (args.length != 3) {
+            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
+            System.exit(1);
+        }
+
+        Integer flag = Integer.valueOf(args[0]);
+        String redisHost = args[1];
+        Integer redisPort = Integer.valueOf(args[2]);
+
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (flag == 0) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("test_wordCounter_for_redis");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (flag == 1) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
+        } else {
+            System.out.println("Usage: WordCountTridentRedis 0(storm-local)|1(storm-cluster) redis-host redis-port");
+        }
+    }
+}
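
One thing to double-check when reusing this: withExpire sets an expiration on the
stored state, and since Redis can only expire whole keys (never individual hash
fields), the expire applies to the entire "test" hash. Whether the argument is read
as seconds or milliseconds depends on the RedisStateUpdater implementation, so
86400000 may be far longer than the one day it suggests. The remaining TTL can be
verified directly (assuming a local Redis on 127.0.0.1:6379):

    import redis.clients.jedis.Jedis;

    public class StateExpireCheck {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // TTL returns remaining seconds; -1 means no expire, -2 means no key
                System.out.println(jedis.ttl("test"));
            }
        }
    }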

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
new file mode 100644
index 0000000..116a58a
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.redis.common.mapper.RedisLookupMapper;
+import org.apache.storm.redis.common.mapper.RedisStoreMapper;
+import org.apache.storm.redis.trident.state.RedisClusterState;
+import org.apache.storm.redis.trident.state.RedisClusterStateQuerier;
+import org.apache.storm.redis.trident.state.RedisClusterStateUpdater;
+import org.apache.storm.redis.common.config.JedisClusterConfig;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+import java.net.InetSocketAddress;
+import java.util.HashSet;
+import java.util.Set;
+
+public class WordCountTridentRedisCluster {
+    public static StormTopology buildTopology(String redisHostPort){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>();
+        for (String hostPort : redisHostPort.split(",")) {
+            String[] host_port = hostPort.split(":");
+            nodes.add(new InetSocketAddress(host_port[0], Integer.valueOf(host_port[1])));
+        }
+        JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes)
+                                        .build();
+
+        RedisStoreMapper storeMapper = new WordCountStoreMapper();
+        RedisLookupMapper lookupMapper = new WordCountLookupMapper();
+        RedisClusterState.Factory factory = new RedisClusterState.Factory(clusterConfig);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        stream.partitionPersist(factory,
+                                fields,
+                                new RedisClusterStateUpdater(storeMapper).withExpire(86400000),
+                                new Fields());
+
+        TridentState state = topology.newStaticState(factory);
+        stream = stream.stateQuery(state, new Fields("word"),
+                                new RedisClusterStateQuerier(lookupMapper),
+                                new Fields("columnName","columnValue"));
+        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        if (args.length != 2) {
+            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
+            System.exit(1);
+        }
+
+        Integer flag = Integer.valueOf(args[0]);
+        String redisHostPort = args[1];
+
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (flag == 0) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("test_wordCounter_for_redis");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (flag == 1) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
+        } else {
+            System.out.println("Usage: WordCountTridentRedisCluster 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
+        }
+    }
+
+}
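
The cluster variant keeps the same hash/key convention, but reads must go through a
cluster-aware client. A minimal sketch (assuming a local Redis Cluster with nodes
127.0.0.1:6379 and 127.0.0.1:6380, and a Jedis version in which JedisCluster is
Closeable):

    import java.util.HashSet;
    import java.util.Set;
    import redis.clients.jedis.HostAndPort;
    import redis.clients.jedis.JedisCluster;

    public class ClusterHashCheck {
        public static void main(String[] args) throws Exception {
            Set<HostAndPort> nodes = new HashSet<HostAndPort>();
            nodes.add(new HostAndPort("127.0.0.1", 6379));
            nodes.add(new HostAndPort("127.0.0.1", 6380));
            try (JedisCluster cluster = new JedisCluster(nodes)) {
                // same convention as WordCountStoreMapper: HGET test test_<word>
                System.out.println(cluster.hget("test", "test_storm"));
            }
        }
    }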

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
new file mode 100644
index 0000000..fafb4e0
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.common.mapper.TupleMapper;
+import org.apache.storm.redis.trident.state.RedisClusterMapState;
+import org.apache.storm.redis.common.config.JedisClusterConfig;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.operation.builtin.MapGet;
+import org.apache.storm.trident.operation.builtin.Sum;
+import org.apache.storm.trident.state.StateFactory;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+import java.net.InetSocketAddress;
+import java.util.HashSet;
+import java.util.Set;
+
+public class WordCountTridentRedisClusterMap {
+    public static StormTopology buildTopology(String redisHostPort){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>();
+        for (String hostPort : redisHostPort.split(",")) {
+            String[] host_port = hostPort.split(":");
+            nodes.add(new InetSocketAddress(host_port[0], Integer.valueOf(host_port[1])));
+        }
+        JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes)
+                                        .build();
+        RedisDataTypeDescription dataTypeDescription = new RedisDataTypeDescription(
+                RedisDataTypeDescription.RedisDataType.HASH, "test");
+        StateFactory factory = RedisClusterMapState.transactional(clusterConfig, dataTypeDescription);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        TridentState state = stream.groupBy(new Fields("word"))
+                .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
+
+        stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
+                .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        if (args.length != 2) {
+            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
+            System.exit(1);
+        }
+
+        Integer flag = Integer.valueOf(args[0]);
+        String redisHostPort = args[1];
+
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (flag == 0) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("test_wordCounter_for_redis");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (flag == 1) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
+        } else {
+            System.out.println("Usage: WordCountTridentRedisClusterMap 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
----------------------------------------------------------------------
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
new file mode 100644
index 0000000..384f97c
--- /dev/null
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.redis.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
+import org.apache.storm.redis.trident.state.RedisMapState;
+import org.apache.storm.redis.common.config.JedisPoolConfig;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.operation.builtin.MapGet;
+import org.apache.storm.trident.operation.builtin.Sum;
+import org.apache.storm.trident.state.StateFactory;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+public class WordCountTridentRedisMap {
+    public static StormTopology buildTopology(String redisHost, Integer redisPort){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
+                                        .setHost(redisHost).setPort(redisPort)
+                                        .build();
+
+        RedisDataTypeDescription dataTypeDescription = new RedisDataTypeDescription(
+                RedisDataTypeDescription.RedisDataType.HASH, "test");
+        StateFactory factory = RedisMapState.transactional(poolConfig, dataTypeDescription);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        TridentState state = stream.groupBy(new Fields("word"))
+                .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
+
+        stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
+                .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        if (args.length != 3) {
+            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
+            System.exit(1);
+        }
+
+        Integer flag = Integer.valueOf(args[0]);
+        String redisHost = args[1];
+        Integer redisPort = Integer.valueOf(args[2]);
+
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (flag == 0) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("test_wordCounter_for_redis");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (flag == 1) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
+        } else {
+            System.out.println("Usage: WordCountTridentRedisMap 0(storm-local)|1(storm-cluster) redis-host redis-port");
+        }
+    }
+
+}
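
Unlike the partitionPersist examples above, RedisMapState keeps Trident state
metadata with each value: with the transactional factory the persisted entry is
typically an encoded [txid, value] pair rather than a bare sum, and the hash field
is derived from the groupBy key by the state's key factory (here presumably just
the word itself). A raw read therefore returns the encoded form (assuming a local
Redis on 127.0.0.1:6379 and a prior run of one of the two map-state topologies
above):

    import redis.clients.jedis.Jedis;

    public class MapStateCheck {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // expect something like [<txid>, <sum>] rather than a plain number
                System.out.println(jedis.hget("test", "storm"));
            }
        }
    }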

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-solr-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-solr-examples/pom.xml b/examples/storm-solr-examples/pom.xml
new file mode 100644
index 0000000..e64ec5f
--- /dev/null
+++ b/examples/storm-solr-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-solr-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-solr</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>


[09/10] storm git commit: Merge branch 'STORM-1970-1.x' into 1.x-branch

Posted by ka...@apache.org.
Merge branch 'STORM-1970-1.x' into 1.x-branch


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/a2146030
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/a2146030
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/a2146030

Branch: refs/heads/1.x-branch
Commit: a2146030aa55a2f30ced946370f4c3af610be4db
Parents: bc0a1b8 d19816d
Author: Jungtaek Lim <ka...@gmail.com>
Authored: Mon Sep 12 13:15:12 2016 +0900
Committer: Jungtaek Lim <ka...@gmail.com>
Committed: Mon Sep 12 13:15:12 2016 +0900

----------------------------------------------------------------------
 examples/storm-elasticsearch-examples/pom.xml   |  86 ++++++++
 .../elasticsearch/bolt/EsIndexTopology.java     | 120 +++++++++++
 .../storm/elasticsearch/common/EsConstants.java |  22 ++
 .../storm/elasticsearch/common/EsTestUtil.java  |  75 +++++++
 .../trident/TridentEsTopology.java              | 135 +++++++++++++
 examples/storm-hbase-examples/pom.xml           |  86 ++++++++
 .../storm/hbase/topology/LookupWordCount.java   |  79 ++++++++
 .../hbase/topology/PersistentWordCount.java     |  91 +++++++++
 .../storm/hbase/topology/TotalWordCounter.java  |  70 +++++++
 .../storm/hbase/topology/WordCountClient.java   |  57 ++++++
 .../hbase/topology/WordCountValueMapper.java    |  70 +++++++
 .../storm/hbase/topology/WordCounter.java       |  59 ++++++
 .../apache/storm/hbase/topology/WordSpout.java  |  88 ++++++++
 .../storm/hbase/trident/PrintFunction.java      |  40 ++++
 .../storm/hbase/trident/WordCountTrident.java   | 104 ++++++++++
 examples/storm-hdfs-examples/pom.xml            |  86 ++++++++
 .../storm/hdfs/bolt/HdfsFileTopology.java       | 196 ++++++++++++++++++
 .../storm/hdfs/bolt/SequenceFileTopology.java   | 202 +++++++++++++++++++
 .../storm/hdfs/trident/FixedBatchSpout.java     |  97 +++++++++
 .../storm/hdfs/trident/TridentFileTopology.java |  99 +++++++++
 .../hdfs/trident/TridentSequenceTopology.java   |  96 +++++++++
 examples/storm-hive-examples/pom.xml            |  86 ++++++++
 .../storm/hive/bolt/BucketTestHiveTopology.java | 189 +++++++++++++++++
 .../apache/storm/hive/bolt/HiveTopology.java    | 151 ++++++++++++++
 .../hive/bolt/HiveTopologyPartitioned.java      | 153 ++++++++++++++
 .../storm/hive/trident/TridentHiveTopology.java | 199 ++++++++++++++++++
 examples/storm-jdbc-examples/pom.xml            |  86 ++++++++
 .../org/apache/storm/jdbc/spout/UserSpout.java  |  90 +++++++++
 .../jdbc/topology/AbstractUserTopology.java     | 115 +++++++++++
 .../jdbc/topology/UserPersistanceTopology.java  |  62 ++++++
 .../UserPersistanceTridentTopology.java         |  61 ++++++
 examples/storm-kafka-examples/pom.xml           |  86 ++++++++
 .../storm/kafka/TridentKafkaTopology.java       |  91 +++++++++
 examples/storm-mongodb-examples/pom.xml         |  86 ++++++++
 .../storm/mongodb/topology/InsertWordCount.java |  81 ++++++++
 .../storm/mongodb/topology/UpdateWordCount.java |  91 +++++++++
 .../storm/mongodb/topology/WordCounter.java     |  67 ++++++
 .../storm/mongodb/topology/WordSpout.java       |  88 ++++++++
 .../storm/mongodb/trident/WordCountTrident.java |  85 ++++++++
 examples/storm-mqtt-examples/pom.xml            | 125 ++++++++++++
 .../src/main/flux/sample.yaml                   |  62 ++++++
 .../src/main/flux/ssl-sample.yaml               |  78 +++++++
 .../mqtt/examples/CustomMessageMapper.java      |  49 +++++
 .../mqtt/examples/MqttBrokerPublisher.java      | 102 ++++++++++
 .../src/main/resources/log4j2.xml               |  32 +++
 examples/storm-opentsdb-examples/pom.xml        |  86 ++++++++
 .../storm/opentsdb/MetricGenBatchSpout.java     |  94 +++++++++
 .../apache/storm/opentsdb/MetricGenSpout.java   |  72 +++++++
 .../opentsdb/SampleOpenTsdbBoltTopology.java    |  70 +++++++
 .../opentsdb/SampleOpenTsdbTridentTopology.java |  87 ++++++++
 examples/storm-redis-examples/pom.xml           |  86 ++++++++
 .../storm/redis/topology/LookupWordCount.java   | 166 +++++++++++++++
 .../redis/topology/PersistentWordCount.java     | 116 +++++++++++
 .../redis/topology/WhitelistWordCount.java      | 155 ++++++++++++++
 .../storm/redis/topology/WordCounter.java       |  67 ++++++
 .../apache/storm/redis/topology/WordSpout.java  |  88 ++++++++
 .../storm/redis/trident/PrintFunction.java      |  40 ++++
 .../redis/trident/WordCountLookupMapper.java    |  57 ++++++
 .../redis/trident/WordCountStoreMapper.java     |  39 ++++
 .../redis/trident/WordCountTridentRedis.java    |  98 +++++++++
 .../trident/WordCountTridentRedisCluster.java   | 106 ++++++++++
 .../WordCountTridentRedisClusterMap.java        | 101 ++++++++++
 .../redis/trident/WordCountTridentRedisMap.java |  94 +++++++++
 examples/storm-solr-examples/pom.xml            |  86 ++++++++
 .../storm/solr/spout/SolrFieldsSpout.java       |  76 +++++++
 .../apache/storm/solr/spout/SolrJsonSpout.java  | 116 +++++++++++
 .../storm/solr/topology/SolrFieldsTopology.java |  56 +++++
 .../storm/solr/topology/SolrJsonTopology.java   |  48 +++++
 .../storm/solr/topology/SolrTopology.java       |  82 ++++++++
 .../solr/trident/SolrFieldsTridentTopology.java |  45 +++++
 .../solr/trident/SolrJsonTridentTopology.java   |  45 +++++
 .../org/apache/storm/solr/util/TestUtil.java    |  30 +++
 .../elasticsearch/bolt/EsIndexTopology.java     | 120 -----------
 .../trident/TridentEsTopology.java              | 135 -------------
 .../storm/hbase/topology/LookupWordCount.java   |  79 --------
 .../hbase/topology/PersistentWordCount.java     |  91 ---------
 .../storm/hbase/topology/TotalWordCounter.java  |  70 -------
 .../storm/hbase/topology/WordCountClient.java   |  57 ------
 .../hbase/topology/WordCountValueMapper.java    |  70 -------
 .../storm/hbase/topology/WordCounter.java       |  59 ------
 .../apache/storm/hbase/topology/WordSpout.java  |  88 --------
 .../storm/hbase/trident/PrintFunction.java      |  40 ----
 .../storm/hbase/trident/WordCountTrident.java   | 104 ----------
 .../storm/hdfs/bolt/HdfsFileTopology.java       | 196 ------------------
 .../storm/hdfs/bolt/SequenceFileTopology.java   | 202 -------------------
 .../storm/hdfs/trident/FixedBatchSpout.java     |  97 ---------
 .../storm/hdfs/trident/TridentFileTopology.java |  99 ---------
 .../hdfs/trident/TridentSequenceTopology.java   |  96 ---------
 .../storm/hive/bolt/BucketTestHiveTopology.java | 190 -----------------
 .../apache/storm/hive/bolt/HiveTopology.java    | 151 --------------
 .../hive/bolt/HiveTopologyPartitioned.java      | 153 --------------
 .../storm/hive/trident/TridentHiveTopology.java | 199 ------------------
 .../org/apache/storm/jdbc/spout/UserSpout.java  |  90 ---------
 .../jdbc/topology/AbstractUserTopology.java     | 115 -----------
 .../jdbc/topology/UserPersistanceTopology.java  |  62 ------
 .../UserPersistanceTridentTopology.java         |  61 ------
 .../storm/kafka/TridentKafkaTopology.java       |  91 ---------
 .../storm/mongodb/topology/InsertWordCount.java |  81 --------
 .../storm/mongodb/topology/UpdateWordCount.java |  91 ---------
 .../storm/mongodb/topology/WordCounter.java     |  67 ------
 .../storm/mongodb/topology/WordSpout.java       |  88 --------
 .../storm/mongodb/trident/WordCountTrident.java |  85 --------
 external/storm-mqtt/examples/pom.xml            | 115 -----------
 .../examples/src/main/flux/sample.yaml          |  62 ------
 .../examples/src/main/flux/ssl-sample.yaml      |  78 -------
 .../mqtt/examples/CustomMessageMapper.java      |  49 -----
 .../mqtt/examples/MqttBrokerPublisher.java      | 102 ----------
 .../examples/src/main/resources/log4j2.xml      |  32 ---
 external/storm-mqtt/pom.xml                     |   1 -
 .../storm/opentsdb/MetricGenBatchSpout.java     |  94 ---------
 .../apache/storm/opentsdb/MetricGenSpout.java   |  72 -------
 .../opentsdb/SampleOpenTsdbBoltTopology.java    |  70 -------
 .../opentsdb/SampleOpenTsdbTridentTopology.java |  87 --------
 .../storm/redis/topology/LookupWordCount.java   | 166 ---------------
 .../redis/topology/PersistentWordCount.java     | 116 -----------
 .../redis/topology/WhitelistWordCount.java      | 155 --------------
 .../storm/redis/topology/WordCounter.java       |  67 ------
 .../apache/storm/redis/topology/WordSpout.java  |  88 --------
 .../storm/redis/trident/PrintFunction.java      |  40 ----
 .../redis/trident/WordCountLookupMapper.java    |  57 ------
 .../redis/trident/WordCountStoreMapper.java     |  39 ----
 .../redis/trident/WordCountTridentRedis.java    |  98 ---------
 .../trident/WordCountTridentRedisCluster.java   | 106 ----------
 .../WordCountTridentRedisClusterMap.java        | 101 ----------
 .../redis/trident/WordCountTridentRedisMap.java |  94 ---------
 .../storm/solr/spout/SolrFieldsSpout.java       |  76 -------
 .../apache/storm/solr/spout/SolrJsonSpout.java  | 120 -----------
 .../storm/solr/topology/SolrFieldsTopology.java |  56 -----
 .../storm/solr/topology/SolrJsonTopology.java   |  48 -----
 .../storm/solr/topology/SolrTopology.java       |  82 --------
 .../solr/trident/SolrFieldsTridentTopology.java |  45 -----
 .../solr/trident/SolrJsonTridentTopology.java   |  45 -----
 .../org/apache/storm/solr/util/TestUtil.java    |  30 ---
 pom.xml                                         |  23 ++-
 134 files changed, 6498 insertions(+), 5522 deletions(-)
----------------------------------------------------------------------



[02/10] storm git commit: STORM-1970: external project examples refactor

Posted by ka...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java b/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
deleted file mode 100644
index ec5645c..0000000
--- a/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/CustomMessageMapper.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mqtt.examples;
-
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.mqtt.MqttMessage;
-import org.apache.storm.mqtt.MqttMessageMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Given a topic name: "users/{user}/{location}/{deviceId}"
- * and a payload of "{temperature}/{humidity}"
- * emits a tuple containing user(String), deviceId(String), location(String), temperature(float), humidity(float)
- *
- */
-public class CustomMessageMapper implements MqttMessageMapper {
-    private static final Logger LOG = LoggerFactory.getLogger(CustomMessageMapper.class);
-
-
-    public Values toValues(MqttMessage message) {
-        String topic = message.getTopic();
-        String[] topicElements = topic.split("/");
-        String[] payloadElements = new String(message.getMessage()).split("/");
-
-        return new Values(topicElements[2], topicElements[4], topicElements[3], Float.parseFloat(payloadElements[0]),
-                Float.parseFloat(payloadElements[1]));
-    }
-
-    public Fields outputFields() {
-        return new Fields("user", "deviceId", "location", "temperature", "humidity");
-    }
-}
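
A note on the index arithmetic above: the javadoc template "users/{user}/{location}/{deviceId}" omits the leading slash that the sample publisher actually uses ("/users/tgoetz/office/1234"). Splitting such a topic on "/" yields an empty first element, which is why user, location and deviceId sit at indices 2, 3 and 4. A minimal sketch, assuming the publisher's topic format (the class name is illustrative):

    public class TopicIndexDemo {
        public static void main(String[] args) {
            // A leading slash puts an empty string at index 0:
            // ["", "users", "tgoetz", "office", "1234"]
            String[] parts = "/users/tgoetz/office/1234".split("/");
            System.out.println(parts[2]); // tgoetz  (user)
            System.out.println(parts[3]); // office  (location)
            System.out.println(parts[4]); // 1234    (deviceId)
        }
    }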

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java b/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
deleted file mode 100644
index fa8389d..0000000
--- a/external/storm-mqtt/examples/src/main/java/org/apache/storm/mqtt/examples/MqttBrokerPublisher.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.mqtt.examples;
-
-
-import org.apache.activemq.broker.BrokerService;
-import org.apache.storm.mqtt.MqttLogger;
-import org.fusesource.mqtt.client.BlockingConnection;
-import org.fusesource.mqtt.client.MQTT;
-import org.fusesource.mqtt.client.QoS;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Random;
-
-public class MqttBrokerPublisher {
-    private static final Logger LOG = LoggerFactory.getLogger(MqttBrokerPublisher.class);
-
-    private static BrokerService broker;
-
-    private static BlockingConnection connection;
-
-
-    public static void startBroker() throws Exception {
-        LOG.info("Starting broker...");
-        broker = new BrokerService();
-        broker.addConnector("mqtt://localhost:1883");
-        broker.setDataDirectory("target");
-        broker.start();
-        LOG.info("MQTT broker started");
-        Runtime.getRuntime().addShutdownHook(new Thread(){
-            @Override
-            public void run() {
-                try {
-                    LOG.info("Shutting down MQTT broker...");
-                    broker.stop();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        });
-    }
-
-    public static void startPublisher() throws Exception {
-        MQTT client = new MQTT();
-        client.setTracer(new MqttLogger());
-        client.setHost("tcp://localhost:1883");
-        client.setClientId("MqttBrokerPublisher");
-        connection = client.blockingConnection();
-
-        Runtime.getRuntime().addShutdownHook(new Thread(){
-            @Override
-            public void run() {
-                try {
-                    LOG.info("Shutting down MQTT client...");
-                    connection.disconnect();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        });
-
-        connection.connect();
-    }
-
-    public static void publish() throws Exception {
-        String topic = "/users/tgoetz/office/1234";
-        Random rand = new Random();
-        LOG.info("Publishing to topic {}", topic);
-        LOG.info("Cntrl+C to exit.");
-
-        while(true) {
-            int temp = rand.nextInt(100);
-            int hum = rand.nextInt(100);
-            String payload = temp + "/" + hum;
-
-            connection.publish(topic, payload.getBytes(), QoS.AT_LEAST_ONCE, false);
-            Thread.sleep(500);
-        }
-    }
-
-    public static void main(String[] args) throws Exception{
-        startBroker();
-        startPublisher();
-        publish();
-    }
-}
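
For completeness, a minimal subscriber sketch against the broker started above, using the same fusesource client API as the publisher; it assumes the broker is reachable on tcp://localhost:1883, and the class name is illustrative:

    import org.fusesource.mqtt.client.BlockingConnection;
    import org.fusesource.mqtt.client.MQTT;
    import org.fusesource.mqtt.client.Message;
    import org.fusesource.mqtt.client.QoS;
    import org.fusesource.mqtt.client.Topic;

    public class MqttSubscriberDemo {
        public static void main(String[] args) throws Exception {
            MQTT client = new MQTT();
            client.setHost("tcp://localhost:1883");
            client.setClientId("MqttSubscriberDemo");
            BlockingConnection connection = client.blockingConnection();
            connection.connect();
            // Subscribe to the exact topic MqttBrokerPublisher writes to.
            connection.subscribe(new Topic[]{
                    new Topic("/users/tgoetz/office/1234", QoS.AT_LEAST_ONCE)});
            while (true) {
                Message message = connection.receive(); // blocks until a message arrives
                System.out.println(new String(message.getPayload())); // "{temperature}/{humidity}"
                message.ack(); // acknowledge for AT_LEAST_ONCE delivery
            }
        }
    }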

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/examples/src/main/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/examples/src/main/resources/log4j2.xml b/external/storm-mqtt/examples/src/main/resources/log4j2.xml
deleted file mode 100644
index bfe57a1..0000000
--- a/external/storm-mqtt/examples/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<Configuration status="WARN">
-    <Appenders>
-        <Console name="Console" target="SYSTEM_OUT">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
-        </Console>
-    </Appenders>
-
-    <Loggers>
-        <Logger name="org.apache.storm.flux.wrappers" level="INFO"/>
-        <Logger name="org.apache.storm.mqtt" level="DEBUG"/>
-        <Root level="error">
-            <AppenderRef ref="Console"/>
-        </Root>
-    </Loggers>
-</Configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-mqtt/pom.xml
----------------------------------------------------------------------
diff --git a/external/storm-mqtt/pom.xml b/external/storm-mqtt/pom.xml
index 5d7a5bb..7535119 100644
--- a/external/storm-mqtt/pom.xml
+++ b/external/storm-mqtt/pom.xml
@@ -51,7 +51,6 @@
 
   <modules>
     <module>core</module>
-    <module>examples</module>
   </modules>
 
 </project>
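
Note that only the module entry is removed here; per the diffstat above, the example sources themselves move to examples/storm-mqtt-examples with their own pom.xml rather than being dropped.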

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java b/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
deleted file mode 100644
index b0580f6..0000000
--- a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenBatchSpout.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.storm.opentsdb;
-
-import com.google.common.collect.Lists;
-import org.apache.storm.Config;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.spout.IBatchSpout;
-import org.apache.storm.tuple.Fields;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-/**
- * BatchSpout implementation for metrics generation.
- */
-public class MetricGenBatchSpout implements IBatchSpout {
-
-    private int batchSize;
-    private final Map<Long, List<List<Object>>> batches = new HashMap<>();
-
-    public MetricGenBatchSpout(int batchSize) {
-        this.batchSize = batchSize;
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context) {
-
-    }
-
-    @Override
-    public void emitBatch(long batchId, TridentCollector collector) {
-        List<List<Object>> values;
-        if(batches.containsKey(batchId)) {
-            values = batches.get(batchId);
-        } else {
-            values = new ArrayList<>();
-            for (int i = 0; i < batchSize; i++) {
-                // tuple values are mapped with
-                // metric, timestamp, value, Map of tagK/tagV respectively.
-                values.add(Lists.newArrayList(Lists.newArrayList("device.temp", System.currentTimeMillis(), new Random().nextLong(),
-                        Collections.singletonMap("loc.id", new Random().nextInt() % 64 + ""))));
-            }
-            batches.put(batchId, values);
-        }
-        for (List<Object> value : values) {
-            collector.emit(value);
-        }
-
-    }
-
-    @Override
-    public void ack(long batchId) {
-        batches.remove(batchId);
-    }
-
-    @Override
-    public void close() {
-
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        Config conf = new Config();
-        conf.setMaxTaskParallelism(1);
-        return conf;
-    }
-
-    @Override
-    public Fields getOutputFields() {
-        return MetricGenSpout.DEFAULT_METRIC_FIELDS;
-    }
-}
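
One detail worth flagging in the metric generators (this batch spout and MetricGenSpout below): the loc.id tag is built with new Random().nextInt() % 64, which allocates a fresh Random per emit and, because nextInt() spans the full int range, can yield negative tag values. A minimal sketch of a bounded, non-negative variant, assuming negative location ids are unintended (the class name is illustrative):

    import java.util.Random;

    public class TagIdDemo {
        private static final Random RAND = new Random(); // reuse one instance

        public static void main(String[] args) {
            // nextInt(64) is uniform over [0, 64), so the tag is never negative.
            String locId = String.valueOf(RAND.nextInt(64));
            System.out.println("loc.id=" + locId);
        }
    }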

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenSpout.java b/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenSpout.java
deleted file mode 100644
index 21af196..0000000
--- a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/MetricGenSpout.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.storm.opentsdb;
-
-import com.google.common.collect.Lists;
-import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.Random;
-
-/**
- * Spout to generate tuples containing metric data.
- */
-public class MetricGenSpout extends BaseRichSpout {
-
-    public static final Fields DEFAULT_METRIC_FIELDS =
-            new Fields(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getMetricField(),
-                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getTimestampField(),
-                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getValueField(),
-                    TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER.getTagsField());
-
-    private Map conf;
-    private TopologyContext context;
-    private SpoutOutputCollector collector;
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(DEFAULT_METRIC_FIELDS);
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.conf = conf;
-        this.context = context;
-        this.collector = collector;
-    }
-
-    @Override
-    public void nextTuple() {
-        try {
-            Thread.sleep(100);
-        } catch (InterruptedException e) {
-            // ignore
-        }
-        // tuple values are mapped with
-        // metric, timestamp, value, Map of tagK/tagV respectively.
-        collector.emit(Lists.newArrayList("device.temp", System.currentTimeMillis(), new Random().nextLong(),
-                Collections.singletonMap("loc.id", new Random().nextInt() % 64 + "")));
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java b/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
deleted file mode 100644
index 6c511b8..0000000
--- a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbBoltTopology.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.storm.opentsdb;
-
-import org.apache.storm.opentsdb.bolt.OpenTsdbBolt;
-import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
-import org.apache.storm.opentsdb.client.OpenTsdbClient;
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.TopologyBuilder;
-
-import java.util.Collections;
-
-/**
- * Sample application to use OpenTSDB bolt.
- */
-public class SampleOpenTsdbBoltTopology {
-
-    public static void main(String[] args) throws Exception {
-        if(args.length == 0) {
-            throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbBoltTopology <tsdb-url>`");
-        }
-
-        TopologyBuilder topologyBuilder = new TopologyBuilder();
-
-        topologyBuilder.setSpout("metric-gen", new MetricGenSpout(), 5);
-
-        String openTsdbUrl = args[0];
-        OpenTsdbClient.Builder builder =  OpenTsdbClient.newBuilder(openTsdbUrl).sync(30_000).returnDetails();
-        final OpenTsdbBolt openTsdbBolt = new OpenTsdbBolt(builder, Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
-        openTsdbBolt.withBatchSize(10).withFlushInterval(2).failTupleForFailedMetrics();
-        topologyBuilder.setBolt("opentsdb", openTsdbBolt).shuffleGrouping("metric-gen");
-
-        Config conf = new Config();
-        conf.setDebug(true);
-
-        if (args.length > 1) {
-            conf.setNumWorkers(3);
-
-            StormSubmitter.submitTopologyWithProgressBar(args[1], conf, topologyBuilder.createTopology());
-        } else {
-            conf.setMaxTaskParallelism(3);
-
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("word-count", conf, topologyBuilder.createTopology());
-
-            Thread.sleep(30000);
-
-            cluster.shutdown();
-            System.exit(0);
-        }
-    }
-}
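
Both OpenTSDB samples follow the same submission convention: one argument runs on a LocalCluster for 30 seconds, a second argument submits to a remote cluster under that topology name. A local-run sketch, assuming an OpenTSDB instance on its default HTTP port (4242); the wrapper class is illustrative:

    public class RunOpenTsdbSampleLocally {
        public static void main(String[] args) throws Exception {
            // A single argument => LocalCluster run; a second argument would
            // trigger StormSubmitter.submitTopologyWithProgressBar instead.
            SampleOpenTsdbBoltTopology.main(new String[]{"http://localhost:4242"});
        }
    }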

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java b/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
deleted file mode 100644
index db51a8a..0000000
--- a/external/storm-opentsdb/src/test/java/org/apache/storm/opentsdb/SampleOpenTsdbTridentTopology.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.storm.opentsdb;
-
-import org.apache.storm.opentsdb.bolt.TupleOpenTsdbDatapointMapper;
-import org.apache.storm.opentsdb.client.OpenTsdbClient;
-import org.apache.storm.opentsdb.trident.OpenTsdbStateFactory;
-import org.apache.storm.opentsdb.trident.OpenTsdbStateUpdater;
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.operation.Consumer;
-import org.apache.storm.trident.tuple.TridentTuple;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-
-/**
- * Sample trident topology to store time series metrics in to OpenTsdb.
- */
-public class SampleOpenTsdbTridentTopology {
-    private static final Logger LOG = LoggerFactory.getLogger(SampleOpenTsdbTridentTopology.class);
-
-    public static void main(String[] args) throws Exception {
-        if(args.length == 0) {
-            throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbTridentTopology <tsdb-url>`");
-        }
-
-        String tsdbUrl = args[0];
-
-
-        final OpenTsdbClient.Builder openTsdbClientBuilder = OpenTsdbClient.newBuilder(tsdbUrl);
-        final OpenTsdbStateFactory openTsdbStateFactory =
-                new OpenTsdbStateFactory(openTsdbClientBuilder,
-                        Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
-
-        TridentTopology tridentTopology = new TridentTopology();
-        final Stream stream = tridentTopology.newStream("metric-tsdb-stream", new MetricGenBatchSpout(10));
-
-        stream.peek(new Consumer() {
-            @Override
-            public void accept(TridentTuple input) {
-                LOG.info("########### Received tuple: [{}]", input);
-            }
-        }).partitionPersist(openTsdbStateFactory, MetricGenSpout.DEFAULT_METRIC_FIELDS, new OpenTsdbStateUpdater());
-
-
-        Config conf = new Config();
-        conf.setDebug(true);
-
-        if (args.length > 1) {
-            conf.setNumWorkers(3);
-
-            StormSubmitter.submitTopologyWithProgressBar(args[1], conf, tridentTopology.build());
-        } else {
-            conf.setMaxTaskParallelism(3);
-
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("word-count", conf, tridentTopology.build());
-
-            Thread.sleep(30000);
-
-            cluster.shutdown();
-            System.exit(0);
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/topology/LookupWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/LookupWordCount.java b/external/storm-redis/src/test/java/org/apache/storm/redis/topology/LookupWordCount.java
deleted file mode 100644
index f62b7b0..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/LookupWordCount.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichBolt;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Lists;
-import org.apache.storm.redis.bolt.RedisLookupBolt;
-import org.apache.storm.redis.common.config.JedisPoolConfig;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.RedisLookupMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-public class LookupWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
-    private static final String PRINT_BOLT = "PRINT_BOLT";
-
-    private static final String TEST_REDIS_HOST = "127.0.0.1";
-    private static final int TEST_REDIS_PORT = 6379;
-
-    public static class PrintWordTotalCountBolt extends BaseRichBolt {
-        private static final Logger LOG = LoggerFactory.getLogger(PrintWordTotalCountBolt.class);
-        private static final Random RANDOM = new Random();
-        private OutputCollector collector;
-
-        @Override
-        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-            this.collector = collector;
-        }
-
-        @Override
-        public void execute(Tuple input) {
-            String wordName = input.getStringByField("wordName");
-            String countStr = input.getStringByField("count");
-
-            // print lookup result with low probability
-            if(RANDOM.nextInt(1000) > 995) {
-                int count = 0;
-                if (countStr != null) {
-                    count = Integer.parseInt(countStr);
-                }
-                LOG.info("Lookup result - word : " + wordName + " / count : " + count);
-            }
-
-            collector.ack(input);
-        }
-
-        @Override
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        String host = TEST_REDIS_HOST;
-        int port = TEST_REDIS_PORT;
-
-        if (args.length >= 2) {
-            host = args[0];
-            port = Integer.parseInt(args[1]);
-        }
-
-        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
-                .setHost(host).setPort(port).build();
-
-        WordSpout spout = new WordSpout();
-        RedisLookupMapper lookupMapper = setupLookupMapper();
-        RedisLookupBolt lookupBolt = new RedisLookupBolt(poolConfig, lookupMapper);
-
-        PrintWordTotalCountBolt printBolt = new PrintWordTotalCountBolt();
-
-        //wordspout -> lookupbolt
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(LOOKUP_BOLT, lookupBolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(PRINT_BOLT, printBolt, 1).shuffleGrouping(LOOKUP_BOLT);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else{
-            System.out.println("Usage: LookupWordCount <redis host> <redis port> (topology name)");
-        }
-    }
-
-    private static RedisLookupMapper setupLookupMapper() {
-        return new WordCountRedisLookupMapper();
-    }
-
-    private static class WordCountRedisLookupMapper implements RedisLookupMapper {
-        private RedisDataTypeDescription description;
-        private final String hashKey = "wordCount";
-
-        public WordCountRedisLookupMapper() {
-            description = new RedisDataTypeDescription(
-                    RedisDataTypeDescription.RedisDataType.HASH, hashKey);
-        }
-
-        @Override
-        public List<Values> toTuple(ITuple input, Object value) {
-            String member = getKeyFromTuple(input);
-            List<Values> values = Lists.newArrayList();
-            values.add(new Values(member, value));
-            return values;
-        }
-
-        @Override
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("wordName", "count"));
-        }
-
-        @Override
-        public RedisDataTypeDescription getDataTypeDescription() {
-            return description;
-        }
-
-        @Override
-        public String getKeyFromTuple(ITuple tuple) {
-            return tuple.getStringByField("word");
-        }
-
-        @Override
-        public String getValueFromTuple(ITuple tuple) {
-            return null;
-        }
-    }
-}
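
The lookup bolt above resolves each word against the wordCount hash (HASH data type, host/port defaulting to 127.0.0.1:6379), so an empty Redis yields null counts, which PrintWordTotalCountBolt treats as zero. A minimal Jedis sketch to seed the hash before running, under those same connection defaults (the class name is illustrative):

    import redis.clients.jedis.Jedis;

    public class SeedWordCounts {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // The same five words WordSpout emits.
                for (String word : new String[]{"apple", "orange", "pineapple", "banana", "watermelon"}) {
                    jedis.hset("wordCount", word, "0");
                }
            }
        }
    }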

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/topology/PersistentWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/PersistentWordCount.java b/external/storm-redis/src/test/java/org/apache/storm/redis/topology/PersistentWordCount.java
deleted file mode 100644
index d46bab6..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/PersistentWordCount.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.redis.bolt.AbstractRedisBolt;
-import org.apache.storm.redis.bolt.RedisStoreBolt;
-import org.apache.storm.redis.common.config.JedisClusterConfig;
-import org.apache.storm.redis.common.config.JedisPoolConfig;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.RedisStoreMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import redis.clients.jedis.JedisCommands;
-import redis.clients.jedis.exceptions.JedisConnectionException;
-import redis.clients.jedis.exceptions.JedisException;
-
-public class PersistentWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String COUNT_BOLT = "COUNT_BOLT";
-    private static final String STORE_BOLT = "STORE_BOLT";
-
-    private static final String TEST_REDIS_HOST = "127.0.0.1";
-    private static final int TEST_REDIS_PORT = 6379;
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        String host = TEST_REDIS_HOST;
-        int port = TEST_REDIS_PORT;
-
-        if (args.length >= 2) {
-            host = args[0];
-            port = Integer.parseInt(args[1]);
-        }
-
-        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
-                .setHost(host).setPort(port).build();
-
-        WordSpout spout = new WordSpout();
-        WordCounter bolt = new WordCounter();
-        RedisStoreMapper storeMapper = setupStoreMapper();
-        RedisStoreBolt storeBolt = new RedisStoreBolt(poolConfig, storeMapper);
-
-        // wordSpout ==> countBolt ==> RedisBolt
-        TopologyBuilder builder = new TopologyBuilder();
-
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(COUNT_BOLT, bolt, 1).fieldsGrouping(WORD_SPOUT, new Fields("word"));
-        builder.setBolt(STORE_BOLT, storeBolt, 1).shuffleGrouping(COUNT_BOLT);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else {
-            System.out.println("Usage: PersistentWordCount <redis host> <redis port> (topology name)");
-        }
-    }
-
-    private static RedisStoreMapper setupStoreMapper() {
-        return new WordCountStoreMapper();
-    }
-
-    private static class WordCountStoreMapper implements RedisStoreMapper {
-        private RedisDataTypeDescription description;
-        private final String hashKey = "wordCount";
-
-        public WordCountStoreMapper() {
-            description = new RedisDataTypeDescription(
-                    RedisDataTypeDescription.RedisDataType.HASH, hashKey);
-        }
-
-        @Override
-        public RedisDataTypeDescription getDataTypeDescription() {
-            return description;
-        }
-
-        @Override
-        public String getKeyFromTuple(ITuple tuple) {
-            return tuple.getStringByField("word");
-        }
-
-        @Override
-        public String getValueFromTuple(ITuple tuple) {
-            return tuple.getStringByField("count");
-        }
-    }
-}
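
This is the write side of the same wordCount hash: the store mapper maps each tuple to, effectively, HSET wordCount <word> <count>. A small inspection sketch for what the topology persisted, under the same local-Redis assumption:

    import java.util.Map;

    import redis.clients.jedis.Jedis;

    public class DumpWordCounts {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                for (Map.Entry<String, String> e : jedis.hgetAll("wordCount").entrySet()) {
                    System.out.println(e.getKey() + " -> " + e.getValue());
                }
            }
        }
    }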

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WhitelistWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WhitelistWordCount.java b/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WhitelistWordCount.java
deleted file mode 100644
index bcb2e0b..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WhitelistWordCount.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.topology;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.redis.bolt.RedisFilterBolt;
-import org.apache.storm.redis.common.config.JedisPoolConfig;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.RedisFilterMapper;
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.topology.base.BaseRichBolt;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.tuple.Tuple;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Map;
-import java.util.Random;
-
-public class WhitelistWordCount {
-    private static final String WORD_SPOUT = "WORD_SPOUT";
-    private static final String WHITELIST_BOLT = "WHITELIST_BOLT";
-    private static final String COUNT_BOLT = "COUNT_BOLT";
-    private static final String PRINT_BOLT = "PRINT_BOLT";
-
-    private static final String TEST_REDIS_HOST = "127.0.0.1";
-    private static final int TEST_REDIS_PORT = 6379;
-
-    public static class PrintWordTotalCountBolt extends BaseRichBolt {
-        private static final Logger LOG = LoggerFactory.getLogger(PrintWordTotalCountBolt.class);
-        private static final Random RANDOM = new Random();
-        private OutputCollector collector;
-
-        @Override
-        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-            this.collector = collector;
-        }
-
-        @Override
-        public void execute(Tuple input) {
-            String wordName = input.getStringByField("word");
-            String countStr = input.getStringByField("count");
-
-            // print lookup result with low probability
-            if(RANDOM.nextInt(1000) > 995) {
-                int count = 0;
-                if (countStr != null) {
-                    count = Integer.parseInt(countStr);
-                }
-                LOG.info("Count result - word : " + wordName + " / count : " + count);
-            }
-
-            collector.ack(input);
-        }
-
-        @Override
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        Config config = new Config();
-
-        String host = TEST_REDIS_HOST;
-        int port = TEST_REDIS_PORT;
-
-        if (args.length >= 2) {
-            host = args[0];
-            port = Integer.parseInt(args[1]);
-        }
-
-        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
-                .setHost(host).setPort(port).build();
-
-        WordSpout spout = new WordSpout();
-        RedisFilterMapper filterMapper = setupWhitelistMapper();
-        RedisFilterBolt whitelistBolt = new RedisFilterBolt(poolConfig, filterMapper);
-        WordCounter wordCounterBolt = new WordCounter();
-        PrintWordTotalCountBolt printBolt = new PrintWordTotalCountBolt();
-
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(WHITELIST_BOLT, whitelistBolt, 1).shuffleGrouping(WORD_SPOUT);
-        builder.setBolt(COUNT_BOLT, wordCounterBolt, 1).fieldsGrouping(WHITELIST_BOLT, new Fields("word"));
-        builder.setBolt(PRINT_BOLT, printBolt, 1).shuffleGrouping(COUNT_BOLT);
-
-        if (args.length == 2) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test", config, builder.createTopology());
-            Thread.sleep(30000);
-            cluster.killTopology("test");
-            cluster.shutdown();
-            System.exit(0);
-        } else if (args.length == 3) {
-            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
-        } else{
-            System.out.println("Usage: WhitelistWordCount <redis host> <redis port> (topology name)");
-        }
-    }
-
-    private static RedisFilterMapper setupWhitelistMapper() {
-        return new WhitelistWordFilterMapper();
-    }
-
-    private static class WhitelistWordFilterMapper implements RedisFilterMapper {
-        private RedisDataTypeDescription description;
-        private final String setKey = "whitelist";
-
-        public WhitelistWordFilterMapper() {
-            description = new RedisDataTypeDescription(
-                    RedisDataTypeDescription.RedisDataType.SET, setKey);
-        }
-
-        @Override
-        public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("word"));
-        }
-
-        @Override
-        public RedisDataTypeDescription getDataTypeDescription() {
-            return description;
-        }
-
-        @Override
-        public String getKeyFromTuple(ITuple tuple) {
-            return tuple.getStringByField("word");
-        }
-
-        @Override
-        public String getValueFromTuple(ITuple tuple) {
-            return null;
-        }
-    }
-}
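
The filter bolt passes a word through only if it is a member of the Redis set named whitelist (the SET data type declared by the mapper, effectively a SISMEMBER check), so with an empty set nothing ever reaches the counter. A seeding sketch, same connection assumptions:

    import redis.clients.jedis.Jedis;

    public class SeedWhitelist {
        public static void main(String[] args) {
            try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
                // Only these two of WordSpout's five words will be counted.
                jedis.sadd("whitelist", "apple", "banana");
            }
        }
    }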

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordCounter.java b/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordCounter.java
deleted file mode 100644
index 6fa930c..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordCounter.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.topology;
-
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.IBasicBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Maps;
-
-import java.util.Map;
-
-import static org.apache.storm.utils.Utils.tuple;
-
-public class WordCounter implements IBasicBolt {
-    private Map<String, Integer> wordCounter = Maps.newHashMap();
-
-    @SuppressWarnings("rawtypes")
-    public void prepare(Map stormConf, TopologyContext context) {
-    }
-
-    public void execute(Tuple input, BasicOutputCollector collector) {
-        String word = input.getStringByField("word");
-        int count;
-        if (wordCounter.containsKey(word)) {
-            count = wordCounter.get(word) + 1;
-            wordCounter.put(word, wordCounter.get(word) + 1);
-        } else {
-            count = 1;
-        }
-
-        wordCounter.put(word, count);
-        collector.emit(new Values(word, String.valueOf(count)));
-    }
-
-    public void cleanup() {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word", "count"));
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordSpout.java b/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordSpout.java
deleted file mode 100644
index e2cdfde..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/topology/WordSpout.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.topology;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-
-public class WordSpout implements IRichSpout {
-    boolean isDistributed;
-    SpoutOutputCollector collector;
-    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
-
-    public WordSpout() {
-        this(true);
-    }
-
-    public WordSpout(boolean isDistributed) {
-        this.isDistributed = isDistributed;
-    }
-
-    public boolean isDistributed() {
-        return this.isDistributed;
-    }
-
-    @SuppressWarnings("rawtypes")
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    public void close() {
-
-    }
-
-    public void nextTuple() {
-        final Random rand = new Random();
-        final String word = words[rand.nextInt(words.length)];
-        this.collector.emit(new Values(word), UUID.randomUUID());
-        Thread.yield();
-    }
-
-    public void ack(Object msgId) {
-
-    }
-
-    public void fail(Object msgId) {
-
-    }
-
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word"));
-    }
-
-    @Override
-    public void activate() {
-    }
-
-    @Override
-    public void deactivate() {
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/PrintFunction.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/PrintFunction.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/PrintFunction.java
deleted file mode 100644
index 37d3936..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/PrintFunction.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.storm.trident.operation.BaseFunction;
-import org.apache.storm.trident.operation.TridentCollector;
-import org.apache.storm.trident.tuple.TridentTuple;
-
-import java.util.Random;
-
-public class PrintFunction extends BaseFunction {
-
-    private static final Logger LOG = LoggerFactory.getLogger(PrintFunction.class);
-
-    private static final Random RANDOM = new Random();
-
-    @Override
-    public void execute(TridentTuple tuple, TridentCollector tridentCollector) {
-        if(RANDOM.nextInt(1000) > 995) {
-            LOG.info(tuple.toString());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountLookupMapper.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
deleted file mode 100644
index a6ca8c9..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountLookupMapper.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.RedisLookupMapper;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class WordCountLookupMapper implements RedisLookupMapper {
-    @Override
-    public List<Values> toTuple(ITuple input, Object value) {
-        List<Values> values = new ArrayList<Values>();
-        values.add(new Values(getKeyFromTuple(input), value));
-        return values;
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("word", "value"));
-    }
-
-    @Override
-    public RedisDataTypeDescription getDataTypeDescription() {
-        return new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
-    }
-
-    @Override
-    public String getKeyFromTuple(ITuple tuple) {
-        return "test_" + tuple.getString(0);
-    }
-
-    @Override
-    public String getValueFromTuple(ITuple tuple) {
-        return tuple.getInteger(1).toString();
-    }
-}

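Note: the overrides cooperate: getDataTypeDescription() points RedisStateQuerier at the hash named "test", getKeyFromTuple() derives the hash field from the tuple's first value, and toTuple() turns the looked-up value into the ("word", "value") stream declared in declareOutputFields(). A plain-string variant would drop the hash name entirely; the sketch below assumes RedisDataTypeDescription has a single-argument constructor for the STRING type, so verify against storm-redis before relying on it:

    // Hypothetical STRING variant (sketch): the lookup becomes a plain GET on
    // the key itself, so no additional hash name is required.
    public class WordCountStringLookupMapper extends WordCountLookupMapper {
        @Override
        public RedisDataTypeDescription getDataTypeDescription() {
            return new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.STRING);
        }
    }
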
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountStoreMapper.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
deleted file mode 100644
index 58df150..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountStoreMapper.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.tuple.ITuple;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.RedisStoreMapper;
-
-public class WordCountStoreMapper implements RedisStoreMapper {
-    @Override
-    public RedisDataTypeDescription getDataTypeDescription() {
-        return new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
-    }
-
-    @Override
-    public String getKeyFromTuple(ITuple tuple) {
-        return "test_" + tuple.getString(0);
-    }
-
-    @Override
-    public String getValueFromTuple(ITuple tuple) {
-        return tuple.getInteger(1).toString();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedis.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
deleted file mode 100644
index e3eb0f9..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.redis.common.mapper.RedisLookupMapper;
-import org.apache.storm.redis.common.mapper.RedisStoreMapper;
-import org.apache.storm.redis.trident.state.RedisState;
-import org.apache.storm.redis.trident.state.RedisStateQuerier;
-import org.apache.storm.redis.trident.state.RedisStateUpdater;
-import org.apache.storm.redis.common.config.JedisPoolConfig;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-public class WordCountTridentRedis {
-    public static StormTopology buildTopology(String redisHost, Integer redisPort){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
-                                        .setHost(redisHost).setPort(redisPort)
-                                        .build();
-
-        RedisStoreMapper storeMapper = new WordCountStoreMapper();
-        RedisLookupMapper lookupMapper = new WordCountLookupMapper();
-        RedisState.Factory factory = new RedisState.Factory(poolConfig);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        stream.partitionPersist(factory,
-                                fields,
-                                new RedisStateUpdater(storeMapper).withExpire(86400000),
-                                new Fields());
-
-        TridentState state = topology.newStaticState(factory);
-        stream = stream.stateQuery(state, new Fields("word"),
-                                new RedisStateQuerier(lookupMapper),
-                                new Fields("columnName","columnValue"));
-        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        if (args.length != 3) {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
-            System.exit(1);
-        }
-
-        Integer flag = Integer.valueOf(args[0]);
-        String redisHost = args[1];
-        Integer redisPort = Integer.valueOf(args[2]);
-
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (flag == 0) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("test_wordCounter_for_redis");
-            cluster.shutdown();
-            System.exit(0);
-        } else if(flag == 1) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
-        } else {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
-        }
-    }
-}

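Note: the write path and read path are wired separately here: partitionPersist() stores counts through RedisStateUpdater, while newStaticState() plus RedisStateQuerier reads them back for the print stream. One value worth double-checking: if withExpire() takes its interval in seconds (Jedis EXPIRE semantics; verify against RedisStateUpdater), then 86400000 is about 1,000 days, and a one-day expiry would instead look like this sketch:

    // Inside buildTopology(), assuming the expire interval is in seconds:
    stream.partitionPersist(factory,
                            fields,
                            new RedisStateUpdater(storeMapper).withExpire(86400), // one day
                            new Fields());
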
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
deleted file mode 100644
index 116a58a..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.redis.common.mapper.RedisLookupMapper;
-import org.apache.storm.redis.common.mapper.RedisStoreMapper;
-import org.apache.storm.redis.trident.state.RedisClusterState;
-import org.apache.storm.redis.trident.state.RedisClusterStateQuerier;
-import org.apache.storm.redis.trident.state.RedisClusterStateUpdater;
-import org.apache.storm.redis.common.config.JedisClusterConfig;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-import java.net.InetSocketAddress;
-import java.util.HashSet;
-import java.util.Set;
-
-public class WordCountTridentRedisCluster {
-    public static StormTopology buildTopology(String redisHostPort){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>();
-        for (String hostPort : redisHostPort.split(",")) {
-            String[] host_port = hostPort.split(":");
-            nodes.add(new InetSocketAddress(host_port[0], Integer.valueOf(host_port[1])));
-        }
-        JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes)
-                                        .build();
-
-        RedisStoreMapper storeMapper = new WordCountStoreMapper();
-        RedisLookupMapper lookupMapper = new WordCountLookupMapper();
-        RedisClusterState.Factory factory = new RedisClusterState.Factory(clusterConfig);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        stream.partitionPersist(factory,
-                                fields,
-                                new RedisClusterStateUpdater(storeMapper).withExpire(86400000),
-                                new Fields());
-
-        TridentState state = topology.newStaticState(factory);
-        stream = stream.stateQuery(state, new Fields("word"),
-                                new RedisClusterStateQuerier(lookupMapper),
-                                new Fields("columnName","columnValue"));
-        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        if (args.length != 2) {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
-            System.exit(1);
-        }
-
-        Integer flag = Integer.valueOf(args[0]);
-        String redisHostPort = args[1];
-
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (flag == 0) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("test_wordCounter_for_redis");
-            cluster.shutdown();
-            System.exit(0);
-        } else if(flag == 1) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
-        } else {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");

-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
deleted file mode 100644
index fafb4e0..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisClusterMap.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.common.mapper.TupleMapper;
-import org.apache.storm.redis.trident.state.RedisClusterMapState;
-import org.apache.storm.redis.common.config.JedisClusterConfig;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.operation.builtin.MapGet;
-import org.apache.storm.trident.operation.builtin.Sum;
-import org.apache.storm.trident.state.StateFactory;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-import java.net.InetSocketAddress;
-import java.util.HashSet;
-import java.util.Set;
-
-public class WordCountTridentRedisClusterMap {
-    public static StormTopology buildTopology(String redisHostPort){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>();
-        for (String hostPort : redisHostPort.split(",")) {
-            String[] host_port = hostPort.split(":");
-            nodes.add(new InetSocketAddress(host_port[0], Integer.valueOf(host_port[1])));
-        }
-        JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes)
-                                        .build();
-        RedisDataTypeDescription dataTypeDescription = new RedisDataTypeDescription(
-                RedisDataTypeDescription.RedisDataType.HASH, "test");
-        StateFactory factory = RedisClusterMapState.transactional(clusterConfig, dataTypeDescription);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        TridentState state = stream.groupBy(new Fields("word"))
-                .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
-
-        stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
-                .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        if (args.length != 2) {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
-            System.exit(1);
-        }
-
-        Integer flag = Integer.valueOf(args[0]);
-        String redisHostPort = args[1];
-
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (flag == 0) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("test_wordCounter_for_redis");
-            cluster.shutdown();
-            System.exit(0);
-        } else if(flag == 1) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
-        } else {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
-        }
-    }
-
-}

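Note: transactional map state stores a transaction id next to each value so replayed batches are applied exactly once. Assuming RedisClusterMapState exposes the usual three Trident state factory methods (verify against storm-redis), the flavor should be chosen to match the spout's replay guarantees:

    // Sketch: pick the state flavor that matches the spout semantics.
    StateFactory nonTransactional = RedisClusterMapState.nonTransactional(clusterConfig, dataTypeDescription);
    StateFactory transactional    = RedisClusterMapState.transactional(clusterConfig, dataTypeDescription);
    StateFactory opaque           = RedisClusterMapState.opaque(clusterConfig, dataTypeDescription);
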
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
----------------------------------------------------------------------
diff --git a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java b/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
deleted file mode 100644
index 384f97c..0000000
--- a/external/storm-redis/src/test/java/org/apache/storm/redis/trident/WordCountTridentRedisMap.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.redis.trident;
-
-import org.apache.storm.Config;
-import org.apache.storm.LocalCluster;
-import org.apache.storm.StormSubmitter;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.redis.common.mapper.RedisDataTypeDescription;
-import org.apache.storm.redis.trident.state.RedisMapState;
-import org.apache.storm.redis.common.config.JedisPoolConfig;
-import org.apache.storm.trident.Stream;
-import org.apache.storm.trident.TridentState;
-import org.apache.storm.trident.TridentTopology;
-import org.apache.storm.trident.operation.builtin.MapGet;
-import org.apache.storm.trident.operation.builtin.Sum;
-import org.apache.storm.trident.state.StateFactory;
-import org.apache.storm.trident.testing.FixedBatchSpout;
-
-public class WordCountTridentRedisMap {
-    public static StormTopology buildTopology(String redisHost, Integer redisPort){
-        Fields fields = new Fields("word", "count");
-        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
-                new Values("storm", 1),
-                new Values("trident", 1),
-                new Values("needs", 1),
-                new Values("javadoc", 1)
-        );
-        spout.setCycle(true);
-
-        JedisPoolConfig poolConfig = new JedisPoolConfig.Builder()
-                                        .setHost(redisHost).setPort(redisPort)
-                                        .build();
-
-        RedisDataTypeDescription dataTypeDescription = new RedisDataTypeDescription(
-                RedisDataTypeDescription.RedisDataType.HASH, "test");
-        StateFactory factory = RedisMapState.transactional(poolConfig, dataTypeDescription);
-
-        TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("spout1", spout);
-
-        TridentState state = stream.groupBy(new Fields("word"))
-                .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
-
-        stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
-                .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
-        return topology.build();
-    }
-
-    public static void main(String[] args) throws Exception {
-        if (args.length != 3) {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
-            System.exit(1);
-        }
-
-        Integer flag = Integer.valueOf(args[0]);
-        String redisHost = args[1];
-        Integer redisPort = Integer.valueOf(args[2]);
-
-        Config conf = new Config();
-        conf.setMaxSpoutPending(5);
-        if (flag == 0) {
-            LocalCluster cluster = new LocalCluster();
-            cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
-            Thread.sleep(60 * 1000);
-            cluster.killTopology("test_wordCounter_for_redis");
-            cluster.shutdown();
-            System.exit(0);
-        } else if(flag == 1) {
-            conf.setNumWorkers(3);
-            StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHost, redisPort));
-        } else {
-            System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrFieldsSpout.java b/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
deleted file mode 100644
index 8e3390d..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.spout;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Lists;
-import org.apache.storm.solr.util.TestUtil;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-public class SolrFieldsSpout extends BaseRichSpout {
-    private SpoutOutputCollector collector;
-    public static final List<Values> listValues = Lists.newArrayList(
-            getValues("1"), getValues("2"), getValues("3"));
-
-    private static Values getValues(String suf) {
-        String suffix = "_fields_test_val_" + suf;
-        return new Values(
-                "id" + suffix,
-                TestUtil.getDate(),
-                "dc_title" + suffix,
-                "Hugo%Miguel%Louro" + suffix,           // Multivalue field split by non default token %
-                "dynamic_field" + suffix + "_txt",      // to match dynamic fields of the form "*_txt"
-                "non_matching_field" + suffix);         // this field won't be indexed by solr
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    @Override
-    public void nextTuple() {
-        final Random rand = new Random();
-        final Values values = listValues.get(rand.nextInt(listValues.size()));
-        collector.emit(values);
-        Thread.yield();
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(getOutputFields());
-    }
-
-    public Fields getOutputFields() {
-        return new Fields("id","date","dc_title","author","dynamic_field_txt","non_matching_field");
-    }
-
-    @Override
-    public void close() {
-        super.close();
-    }
-}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrJsonSpout.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrJsonSpout.java b/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrJsonSpout.java
deleted file mode 100644
index bb0c83c..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/spout/SolrJsonSpout.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.spout;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import com.google.common.collect.Lists;
-import com.google.gson.Gson;
-import org.apache.storm.solr.util.TestUtil;
-import org.junit.Test;
-
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-public class SolrJsonSpout extends BaseRichSpout {
-    private SpoutOutputCollector collector;
-    private static final List<Values> listValues = Lists.newArrayList(
-            getJsonValues("1"), getJsonValues("2"), getJsonValues("3"), // Tuple contains String Object in JSON format
-            getPojoValues("1"), getPojoValues("2"));    // Tuple contains Java object that must be serialized to JSON by SolrJsonMapper
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        this.collector = collector;
-    }
-
-    @Override
-    public void nextTuple() {
-        final Random rand = new Random();
-        final Values values = listValues.get(rand.nextInt(listValues.size()));
-        collector.emit(values);
-        Thread.yield();
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(getOutputFields());
-    }
-
-    public Fields getOutputFields() {
-        return new Fields("JSON");
-    }
-
-    @Override
-    public void close() {   //TODO
-        super.close();
-    }
-
-    // ====
-
-    private static Values getJsonValues(String suf) {
-        String suffix = "_json_test_val_" + suf;
-        return new Values((new JsonSchema(suffix)).toJson());
-    }
-
-    private static Values getPojoValues(String suf) {
-        String suffix = "_json_test_val_" + suf;
-        return new Values(new JsonSchema(suffix));
-    }
-
-    public static class JsonSchema {
-        private String id;
-        private String date;
-        private String dc_title;
-
-        private static final Gson gson = new Gson();
-
-        public JsonSchema(String suffix) {
-            this.id = "id" + suffix;
-            this.date = TestUtil.getDate();
-            this.dc_title = "dc_title" + suffix;
-        }
-
-        public JsonSchema(String id, String date, String dc_title) {
-            this.id = id;
-            this.date = date;
-            this.dc_title = dc_title;
-        }
-
-        // copy constructor
-        public JsonSchema(JsonSchema jsonSchema) {
-            this.id = jsonSchema.id;
-            this.date = jsonSchema.date;
-            this.dc_title = jsonSchema.dc_title;
-        }
-
-        public String toJson() {
-            String json = gson.toJson(this);
-            System.out.println(json);   // TODO log
-            return json;
-        }
-
-        public static JsonSchema fromJson(String jsonStr) {
-            return new JsonSchema(gson.fromJson(jsonStr, JsonSchema.class));
-        }
-    }
-}

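Note: the spout deliberately mixes two tuple shapes, raw JSON strings and JsonSchema POJOs, so the downstream SolrJsonMapper is exercised on both. The helper methods already shown make a round-trip check straightforward (illustrative snippet using only the Gson-backed methods above):

    // Round-trip sanity check for JsonSchema serialization.
    SolrJsonSpout.JsonSchema original = new SolrJsonSpout.JsonSchema("_json_test_val_1");
    String json = original.toJson();                                          // serialize with Gson
    SolrJsonSpout.JsonSchema copy = SolrJsonSpout.JsonSchema.fromJson(json);  // parse it back
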
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrFieldsTopology.java b/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
deleted file mode 100644
index 5c9f16d..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrFieldsTopology.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.topology;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.solr.bolt.SolrUpdateBolt;
-import org.apache.storm.solr.config.CountBasedCommit;
-import org.apache.storm.solr.config.SolrCommitStrategy;
-import org.apache.storm.solr.mapper.SolrFieldsMapper;
-import org.apache.storm.solr.mapper.SolrMapper;
-import org.apache.storm.solr.schema.builder.RestJsonSchemaBuilder;
-import org.apache.storm.solr.spout.SolrFieldsSpout;
-
-import java.io.IOException;
-
-public class SolrFieldsTopology extends SolrTopology {
-    public static void main(String[] args) throws Exception {
-        SolrFieldsTopology solrFieldsTopology = new SolrFieldsTopology();
-        solrFieldsTopology.run(args);
-    }
-
-    protected SolrMapper getSolrMapper() throws IOException {
-        return new SolrFieldsMapper.Builder(
-                new RestJsonSchemaBuilder("localhost", "8983", COLLECTION), COLLECTION)
-                    .setMultiValueFieldToken("%").build();
-    }
-
-    protected SolrCommitStrategy getSolrCommitStgy() {
-        return new CountBasedCommit(2);         // To Commit to Solr and Ack according to the commit strategy
-    }
-
-    protected StormTopology getTopology() throws IOException {
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout("SolrFieldsSpout", new SolrFieldsSpout());
-        builder.setBolt("SolrUpdateBolt", new SolrUpdateBolt(getSolrConfig(), getSolrMapper(), getSolrCommitStgy()))
-                .shuffleGrouping("SolrFieldsSpout");
-        return builder.createTopology();
-    }
-}

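Note: CountBasedCommit(2) makes SolrUpdateBolt commit to Solr, and ack the buffered tuples, after every second tuple, which is convenient for watching this test but aggressive for real workloads. A larger threshold amortizes the commit cost at the price of a wider redelivery window on failure (sketch; the threshold is illustrative):

    protected SolrCommitStrategy getSolrCommitStgy() {
        return new CountBasedCommit(1000);  // commit and ack once per 1000 tuples
    }
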
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrJsonTopology.java
----------------------------------------------------------------------
diff --git a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrJsonTopology.java b/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrJsonTopology.java
deleted file mode 100644
index 24e6b5e..0000000
--- a/external/storm-solr/src/test/java/org/apache/storm/solr/topology/SolrJsonTopology.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.storm.solr.topology;
-
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.topology.TopologyBuilder;
-import org.apache.storm.solr.bolt.SolrUpdateBolt;
-import org.apache.storm.solr.mapper.SolrJsonMapper;
-import org.apache.storm.solr.mapper.SolrMapper;
-import org.apache.storm.solr.spout.SolrJsonSpout;
-
-import java.io.IOException;
-
-public class SolrJsonTopology extends SolrTopology {
-    public static void main(String[] args) throws Exception {
-        SolrJsonTopology solrJsonTopology = new SolrJsonTopology();
-        solrJsonTopology.run(args);
-    }
-
-    protected SolrMapper getSolrMapper() throws IOException {
-        final String jsonTupleField = "JSON";
-        return new SolrJsonMapper.Builder(COLLECTION, jsonTupleField).build();
-    }
-
-    protected StormTopology getTopology() throws IOException {
-        TopologyBuilder builder = new TopologyBuilder();
-        builder.setSpout("SolrJsonSpout", new SolrJsonSpout());
-        builder.setBolt("SolrUpdateBolt", new SolrUpdateBolt(getSolrConfig(), getSolrMapper(), getSolrCommitStgy()))
-                .shuffleGrouping("SolrJsonSpout");
-        return builder.createTopology();
-    }
-}


[06/10] storm git commit: STORM-1970: external project examples refactor

Posted by ka...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
new file mode 100644
index 0000000..788b33c
--- /dev/null
+++ b/examples/storm-hdfs-examples/src/main/java/org/apache/storm/hdfs/trident/TridentSequenceTopology.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.hdfs.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.hdfs.common.rotation.MoveFileAction;
+import org.apache.storm.hdfs.trident.format.*;
+import org.apache.storm.hdfs.trident.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.trident.rotation.FileSizeRotationPolicy;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.Map;
+
+public class TridentSequenceTopology {
+
+    public static StormTopology buildTopology(String hdfsUrl){
+        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence", "key"), 1000, new Values("the cow jumped over the moon", 1L),
+                new Values("the man went to the store and bought some candy", 2L), new Values("four score and seven years ago", 3L),
+                new Values("how many apples can you eat", 4L), new Values("to be or not to be the person", 5L));
+        spout.setCycle(true);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        Fields hdfsFields = new Fields("sentence", "key");
+
+        FileNameFormat fileNameFormat = new DefaultFileNameFormat()
+                .withPath("/tmp/trident")
+                .withPrefix("trident")
+                .withExtension(".seq");
+
+        FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);
+
+        HdfsState.Options seqOpts = new HdfsState.SequenceFileOptions()
+                .withFileNameFormat(fileNameFormat)
+                .withSequenceFormat(new DefaultSequenceFormat("key", "sentence"))
+                .withRotationPolicy(rotationPolicy)
+                .withFsUrl(hdfsUrl)
+                .withConfigKey("hdfs.config")
+                .addRotationAction(new MoveFileAction().toDestination("/tmp/dest2/"));
+        StateFactory factory = new HdfsStateFactory().withOptions(seqOpts);
+
+        TridentState state = stream
+                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());
+
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        if (args.length < 2 || args.length > 3) {
+            System.out.println("Usage: TridentSequenceTopology [hdfs url] [hdfs yaml config file] <topology name>");
+            System.exit(1);
+        }
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        Yaml yaml = new Yaml();
+        InputStream in = new FileInputStream(args[1]);
+        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(in);
+        in.close();
+        conf.put("hdfs.config", yamlConf);
+
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("wordCounter", conf, buildTopology(args[0]));
+            Thread.sleep(120 * 1000);
+        } else {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0]));
+        }
+    }
+}

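Note: the second program argument names a YAML file whose contents are parsed into a Map and stored in the topology config under "hdfs.config", the same key handed to withConfigKey(), which is how HdfsState picks the entries up into its Hadoop configuration. Built inline it would look like the sketch below; the two key names are the ones storm-hdfs is commonly configured with for secure clusters, but treat them as an assumption and verify:

    Map<String, Object> hdfsConf = new HashMap<>();                           // java.util.HashMap
    hdfsConf.put("hdfs.keytab.file", "/etc/security/keytabs/storm.keytab");   // hypothetical path
    hdfsConf.put("hdfs.kerberos.principal", "storm@EXAMPLE.COM");             // hypothetical principal
    conf.put("hdfs.config", hdfsConf);
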
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hive-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/pom.xml b/examples/storm-hive-examples/pom.xml
new file mode 100644
index 0000000..bb0220a
--- /dev/null
+++ b/examples/storm-hive-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-hive-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-hive</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
new file mode 100644
index 0000000..781a539
--- /dev/null
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.hive.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
+import org.apache.storm.hive.common.HiveOptions;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+
+public class BucketTestHiveTopology {
+    static final String USER_SPOUT_ID = "user-spout";
+    static final String BOLT_ID = "my-hive-bolt";
+    static final String TOPOLOGY_NAME = "hive-test-topology1";
+
+    public static void main(String[] args) throws Exception {
+        if ((args == null) || (args.length < 7)) {
+            System.out.println("Usage: BucketTestHiveTopology metastoreURI "
+                    + "dbName tableName dataFileLocation hiveBatchSize " +
+                    "hiveTickTupl]eIntervalSecs workers  [topologyNamey] [keytab file]"
+                    + " [principal name] ");
+            System.exit(1);
+        }
+        String metaStoreURI = args[0];
+        String dbName = args[1];
+        String tblName = args[2];
+        String sourceFileLocation = args[3];
+        Integer hiveBatchSize = Integer.parseInt(args[4]);
+        Integer hiveTickTupleIntervalSecs = Integer.parseInt(args[5]);
+        Integer workers = Integer.parseInt(args[6]);
+        String[] colNames = { "ss_sold_date_sk", "ss_sold_time_sk", "ss_item_sk",
+                "ss_customer_sk", "ss_cdemo_sk", "ss_hdemo_sk", "ss_addr_sk",
+                "ss_store_sk", "ss_promo_sk", "ss_ticket_number", "ss_quantity",
+                "ss_wholesale_cost", "ss_list_price", "ss_sales_price",
+                "ss_ext_discount_amt", "ss_ext_sales_price",
+                "ss_ext_wholesale_cost", "ss_ext_list_price", "ss_ext_tax",
+                "ss_coupon_amt", "ss_net_paid", "ss_net_paid_inc_tax",
+                "ss_net_profit" };
+        Config config = new Config();
+        config.setNumWorkers(workers);
+        UserDataSpout spout = new UserDataSpout().withDataFile(sourceFileLocation);
+        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
+                .withColumnFields(new Fields(colNames)).withTimeAsPartitionField("yyyy/MM/dd");
+        HiveOptions hiveOptions;
+        hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+            .withTxnsPerBatch(10)
+            .withBatchSize(hiveBatchSize);
+        // The tick tuple interval is passed in explicitly because it most likely
+        // affects Storm metrics; it is mandatory since the arguments are positional.
+        if (hiveTickTupleIntervalSecs > 0) {
+            hiveOptions.withTickTupleInterval(hiveTickTupleIntervalSecs);
+        }
+        if (args.length == 10) {
+            hiveOptions.withKerberosKeytab(args[8]).withKerberosPrincipal(args[9]);
+        }
+        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(USER_SPOUT_ID, spout, 1);
+        // UserDataSpout --> HiveBolt
+        builder.setBolt(BOLT_ID, hiveBolt, 14)
+                .shuffleGrouping(USER_SPOUT_ID);
+        if (args.length == 7) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+            waitForSeconds(20);
+            cluster.killTopology(TOPOLOGY_NAME);
+            System.out.println("cluster begin to shutdown");
+            cluster.shutdown();
+            System.out.println("cluster shutdown");
+            System.exit(0);
+        } else {
+            StormSubmitter.submitTopology(args[7], config, builder.createTopology());
+        }
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+        }
+    }
+
+    public static class UserDataSpout extends BaseRichSpout {
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String filePath;
+        private BufferedReader br;
+        private int count = 0;
+        private long total = 0L;
+        private String[] outputFields = { "ss_sold_date_sk", "ss_sold_time_sk",
+                "ss_item_sk", "ss_customer_sk", "ss_cdemo_sk", "ss_hdemo_sk",
+                "ss_addr_sk", "ss_store_sk", "ss_promo_sk", "ss_ticket_number",
+                "ss_quantity", "ss_wholesale_cost", "ss_list_price",
+                "ss_sales_price", "ss_ext_discount_amt", "ss_ext_sales_price",
+                "ss_ext_wholesale_cost", "ss_ext_list_price", "ss_ext_tax",
+                "ss_coupon_amt", "ss_net_paid", "ss_net_paid_inc_tax",
+                "ss_net_profit" };
+
+        public UserDataSpout withDataFile (String filePath) {
+            this.filePath = filePath;
+            return this;
+        }
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields(this.outputFields));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+            try {
+                this.br = new BufferedReader(new FileReader(new File(this
+                        .filePath)));
+            } catch (Exception ex) {
+                ex.printStackTrace();
+            }
+        }
+
+        public void nextTuple() {
+            String line;
+            try {
+                if ((line = br.readLine()) != null) {
+                    System.out.println("*********" + line);
+                    String[] values = line.split("\\|", -1);
+                    // split with limit -1 keeps a trailing empty string, so
+                    // truncate to the expected number of columns
+                    values = Arrays.copyOfRange(values, 0,
+                            this.outputFields.length);
+                    Values tupleValues = new Values(values);
+                    UUID msgId = UUID.randomUUID();
+                    this.pending.put(msgId, tupleValues);
+                    this.collector.emit(tupleValues, msgId);
+                    count++;
+                    total++;
+                    if (count > 1000) {
+                        count = 0;
+                        System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+                    }
+                }
+            } catch (IOException ex) {
+                ex.printStackTrace();
+            }
+        }
+
+        public void ack(Object msgId) {
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+}

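Note: besides withTxnsPerBatch() and withBatchSize(), HiveOptions offers further tuning knobs that the companion HiveTopology below makes use of; combined, a configuration sketch looks like this (values illustrative):

    HiveOptions tuned = new HiveOptions(metaStoreURI, dbName, tblName, mapper)
            .withTxnsPerBatch(10)           // transactions requested per batch from the metastore
            .withBatchSize(hiveBatchSize)   // records written per transaction
            .withIdleTimeout(10)            // seconds before idle writers are closed
            .withMaxOpenConnections(1);     // cap on concurrent Hive writer connections
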
http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
new file mode 100644
index 0000000..4afd298
--- /dev/null
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.hive.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
+import org.apache.storm.hive.common.HiveOptions;
+
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+
+public class HiveTopology {
+    static final String USER_SPOUT_ID = "user-spout";
+    static final String BOLT_ID = "my-hive-bolt";
+    static final String TOPOLOGY_NAME = "hive-test-topology1";
+
+    public static void main(String[] args) throws Exception {
+        if (args.length < 3) {
+            System.out.println("Usage: HiveTopology metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
+            System.exit(1);
+        }
+        String metaStoreURI = args[0];
+        String dbName = args[1];
+        String tblName = args[2];
+        String[] colNames = {"id","name","phone","street","city","state"};
+        Config config = new Config();
+        config.setNumWorkers(1);
+        UserDataSpout spout = new UserDataSpout();
+        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
+                .withColumnFields(new Fields(colNames));
+        HiveOptions hiveOptions;
+        if (args.length == 6) {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(100)
+                .withIdleTimeout(10)
+                .withKerberosKeytab(args[4])
+                .withKerberosPrincipal(args[5]);
+        } else {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(100)
+                .withIdleTimeout(10)
+                .withMaxOpenConnections(1);
+        }
+
+        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(USER_SPOUT_ID, spout, 1);
+        // UserDataSpout --> HiveBolt
+        builder.setBolt(BOLT_ID, hiveBolt, 1)
+                .shuffleGrouping(USER_SPOUT_ID);
+        if (args.length == 3) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+            waitForSeconds(20);
+            cluster.killTopology(TOPOLOGY_NAME);
+            System.out.println("cluster begin to shutdown");
+            cluster.shutdown();
+            System.out.println("cluster shutdown");
+            System.exit(0);
+        } else {
+            StormSubmitter.submitTopology(args[3], config, builder.createTopology());
+        }
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
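+    /**
+     * Test spout that cycles through a fixed set of delimited user records,
+     * tracking every emitted tuple in a pending map so that failed tuples can
+     * be re-emitted from fail().
+     */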
+    public static class UserDataSpout extends BaseRichSpout {
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String[] sentences = {
+                "1,user1,123456,street1,sunnyvale,ca",
+                "2,user2,123456,street2,sunnyvale,ca",
+                "3,user3,123456,street3,san jose,ca",
+                "4,user4,123456,street4,san jose,ca",
+        };
+        private int index = 0;
+        private int count = 0;
+        private long total = 0L;
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("id","name","phone","street","city","state"));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+        }
+
+        public void nextTuple() {
+            String[] user = sentences[index].split(",");
+            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
+            UUID msgId = UUID.randomUUID();
+            this.pending.put(msgId, values);
+            this.collector.emit(values, msgId);
+            index++;
+            if (index >= sentences.length) {
+                index = 0;
+            }
+            count++;
+            total++;
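+            // Roughly every 1000 emits, report how many tuples are still un-acked.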
+            if(count > 1000){
+                count = 0;
+                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+            }
+            Thread.yield();
+        }
+
+        public void ack(Object msgId) {
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
new file mode 100644
index 0000000..a52c490
--- /dev/null
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.hive.bolt;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.topology.base.BaseRichSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.utils.Utils;
+
+import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
+import org.apache.storm.hive.common.HiveOptions;
+
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+
+
+public class HiveTopologyPartitioned {
+    static final String USER_SPOUT_ID = "hive-user-spout-partitioned";
+    static final String BOLT_ID = "my-hive-bolt-partitioned";
+    static final String TOPOLOGY_NAME = "hive-test-topology-partitioned";
+
+    public static void main(String[] args) throws Exception {
+        if (args.length < 3) {
+            System.out.println("Usage: HiveTopologyPartitioned metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
+            System.exit(1);
+        }
+        String metaStoreURI = args[0];
+        String dbName = args[1];
+        String tblName = args[2];
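+        // city and state become Hive partition columns; the remaining fields
+        // are written as regular table columns.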
+        String[] partNames = {"city","state"};
+        String[] colNames = {"id","name","phone","street"};
+        Config config = new Config();
+        config.setNumWorkers(1);
+        UserDataSpout spout = new UserDataSpout();
+        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
+            .withColumnFields(new Fields(colNames))
+            .withPartitionFields(new Fields(partNames));
+        HiveOptions hiveOptions;
+        if (args.length == 6) {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(1000)
+                .withIdleTimeout(10)
+                .withKerberosKeytab(args[4])
+                .withKerberosPrincipal(args[5]);
+        } else {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(1000)
+                .withIdleTimeout(10);
+        }
+
+        HiveBolt hiveBolt = new HiveBolt(hiveOptions);
+        TopologyBuilder builder = new TopologyBuilder();
+        builder.setSpout(USER_SPOUT_ID, spout, 1);
+        // UserDataSpout --> HiveBolt
+        builder.setBolt(BOLT_ID, hiveBolt, 1)
+                .shuffleGrouping(USER_SPOUT_ID);
+        if (args.length == 3) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology());
+            waitForSeconds(20);
+            cluster.killTopology(TOPOLOGY_NAME);
+            System.out.println("cluster begin to shutdown");
+            cluster.shutdown();
+            System.out.println("cluster shutdown");
+            System.exit(0);
+        } else {
+            StormSubmitter.submitTopology(args[3], config, builder.createTopology());
+        }
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    public static class UserDataSpout extends BaseRichSpout {
+        private ConcurrentHashMap<UUID, Values> pending;
+        private SpoutOutputCollector collector;
+        private String[] sentences = {
+                "1,user1,123456,street1,sunnyvale,ca",
+                "2,user2,123456,street2,sunnyvale,ca",
+                "3,user3,123456,street3,san jose,ca",
+                "4,user4,123456,street4,san jose,ca",
+        };
+        private int index = 0;
+        private int count = 0;
+        private long total = 0L;
+
+        public void declareOutputFields(OutputFieldsDeclarer declarer) {
+            declarer.declare(new Fields("id","name","phone","street","city","state"));
+        }
+
+        public void open(Map config, TopologyContext context,
+                         SpoutOutputCollector collector) {
+            this.collector = collector;
+            this.pending = new ConcurrentHashMap<UUID, Values>();
+        }
+
+        public void nextTuple() {
+            String[] user = sentences[index].split(",");
+            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
+            UUID msgId = UUID.randomUUID();
+            this.pending.put(msgId, values);
+            this.collector.emit(values, msgId);
+            index++;
+            if (index >= sentences.length) {
+                index = 0;
+            }
+            count++;
+            total++;
+            if (count > 1000) {
+                Utils.sleep(1000);
+                count = 0;
+                System.out.println("Pending count: " + this.pending.size() + ", total: " + this.total);
+            }
+        }
+
+        public void ack(Object msgId) {
+            this.pending.remove(msgId);
+        }
+
+        public void fail(Object msgId) {
+            System.out.println("**** RESENDING FAILED TUPLE");
+            this.collector.emit(this.pending.get(msgId), msgId);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
new file mode 100644
index 0000000..86a35e6
--- /dev/null
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.storm.hive.trident;
+
+
+import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
+import org.apache.storm.hive.common.HiveOptions;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.hooks.SubmitterHookException;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.trident.operation.TridentCollector;
+import org.apache.storm.trident.spout.IBatchSpout;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class TridentHiveTopology {
+    private static final Logger LOG = LoggerFactory.getLogger(TridentHiveTopology.class);
+
+    public static StormTopology buildTopology(String metaStoreURI, String dbName, String tblName, String keytab, String principal) {
+        int batchSize = 100;
+        FixedBatchSpout spout = new FixedBatchSpout(batchSize);
+        spout.setCycle(true);
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("hiveTridentspout1",spout);
+        String[] partNames = {"city","state"};
+        String[] colNames = {"id","name","phone","street"};
+        Fields hiveFields = new Fields("id","name","phone","street","city","state");
+        DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
+            .withColumnFields(new Fields(colNames))
+            .withPartitionFields(new Fields(partNames));
+        HiveOptions hiveOptions;
+        if (keytab != null && principal != null) {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(batchSize)
+                .withIdleTimeout(10)
+                .withCallTimeout(30000)
+                .withKerberosKeytab(keytab)
+                .withKerberosPrincipal(principal);
+        } else  {
+            hiveOptions = new HiveOptions(metaStoreURI,dbName,tblName,mapper)
+                .withTxnsPerBatch(10)
+                .withBatchSize(batchSize)
+                .withCallTimeout(30000)
+                .withIdleTimeout(10);
+        }
+        StateFactory factory = new HiveStateFactory().withOptions(hiveOptions);
+        TridentState state = stream.partitionPersist(factory, hiveFields, new HiveUpdater(), new Fields());
+        return topology.build();
+    }
+
+    public static void waitForSeconds(int seconds) {
+        try {
+            Thread.sleep(seconds * 1000);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    public static void main(String[] args) {
+        if (args.length < 3) {
+            LOG.info("Usage: TridentHiveTopology metastoreURI dbName tableName [topologyName] [keytab file] [principal name]");
+            System.exit(1);
+        }
+        String metaStoreURI = args[0];
+        String dbName = args[1];
+        String tblName = args[2];
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if(args.length == 3) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("tridentHiveTopology", conf, buildTopology(metaStoreURI, dbName, tblName,null,null));
+            LOG.info("waiting for 60 seconds");
+            waitForSeconds(60);
+            LOG.info("killing topology");
+            cluster.killTopology("tridentHiveTopology");
+            LOG.info("shutting down cluster");
+            cluster.shutdown();
+            LOG.info("cluster shutdown");
+            System.exit(0);
+        } else if(args.length == 4) {
+            try {
+                StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreURI, dbName, tblName,null,null));
+            } catch(SubmitterHookException e) {
+                LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
+            } catch (Exception e) {
+                LOG.warn("Failed to submit topology ", e);
+            }
+        } else if (args.length == 6) {
+            try {
+                StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreURI, dbName, tblName,args[4],args[5]));
+            } catch(SubmitterHookException e) {
+                LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
+            } catch (Exception e) {
+                LOG.warn("Failed to submit topology ", e);
+            }
+        } else {
+            LOG.info("Usage: TridentHiveTopology metastoreURI dbName tableName [topologyNamey]");
+        }
+    }
+
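+    /**
+     * Test spout that emits fixed user records in batches. Each batch is cached
+     * by batchId so that a replayed batch re-emits exactly the same tuples.
+     */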
+    public static class FixedBatchSpout implements IBatchSpout {
+        int maxBatchSize;
+        HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
+        private Values[] outputs = {
+            new Values("1","user1","123456","street1","sunnyvale","ca"),
+            new Values("2","user2","123456","street2","sunnyvale","ca"),
+            new Values("3","user3","123456","street3","san jose","ca"),
+            new Values("4","user4","123456","street4","san jose","ca"),
+        };
+        private int index = 0;
+        boolean cycle = false;
+
+        public FixedBatchSpout(int maxBatchSize) {
+            this.maxBatchSize = maxBatchSize;
+        }
+
+        public void setCycle(boolean cycle) {
+            this.cycle = cycle;
+        }
+
+        @Override
+        public Fields getOutputFields() {
+            return new Fields("id","name","phone","street","city","state");
+        }
+
+        @Override
+        public void open(Map conf, TopologyContext context) {
+            index = 0;
+        }
+
+        @Override
+        public void emitBatch(long batchId, TridentCollector collector) {
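+            // Replay support: reuse the cached batch if this batchId was seen before.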
+            List<List<Object>> batch = this.batches.get(batchId);
+            if(batch == null){
+                batch = new ArrayList<List<Object>>();
+                if(index>=outputs.length && cycle) {
+                    index = 0;
+                }
+                for(int i=0; i < maxBatchSize; index++, i++) {
+                    if(index == outputs.length){
+                        index=0;
+                    }
+                    batch.add(outputs[index]);
+                }
+                this.batches.put(batchId, batch);
+            }
+            for(List<Object> list : batch){
+                collector.emit(list);
+            }
+        }
+
+        @Override
+        public void ack(long batchId) {
+            this.batches.remove(batchId);
+        }
+
+        @Override
+        public void close() {
+        }
+
+        @Override
+        public Map getComponentConfiguration() {
+            Config conf = new Config();
+            conf.setMaxTaskParallelism(1);
+            return conf;
+        }
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-jdbc-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/pom.xml b/examples/storm-jdbc-examples/pom.xml
new file mode 100644
index 0000000..dc90fcd
--- /dev/null
+++ b/examples/storm-jdbc-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-jdbc-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-jdbc</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
new file mode 100644
index 0000000..fdcd053
--- /dev/null
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.jdbc.spout;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.IRichSpout;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Lists;
+
+import java.util.*;
+
+public class UserSpout implements IRichSpout {
+    boolean isDistributed;
+    SpoutOutputCollector collector;
+    public static final List<Values> rows = Lists.newArrayList(
+            new Values(1,"peter",System.currentTimeMillis()),
+            new Values(2,"bob",System.currentTimeMillis()),
+            new Values(3,"alice",System.currentTimeMillis()));
+
+    public UserSpout() {
+        this(true);
+    }
+
+    public UserSpout(boolean isDistributed) {
+        this.isDistributed = isDistributed;
+    }
+
+    public boolean isDistributed() {
+        return this.isDistributed;
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    public void close() {
+
+    }
+
+    public void nextTuple() {
+        final Random rand = new Random();
+        // Bound must be rows.size(); rows.size() - 1 would never pick the last row.
+        final Values row = rows.get(rand.nextInt(rows.size()));
+        this.collector.emit(row);
+        Thread.yield();
+    }
+
+    public void ack(Object msgId) {
+
+    }
+
+    public void fail(Object msgId) {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("user_id","user_name","create_date"));
+    }
+
+    @Override
+    public void activate() {
+    }
+
+    @Override
+    public void deactivate() {
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
new file mode 100644
index 0000000..ec7ca36
--- /dev/null
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.jdbc.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.storm.jdbc.common.Column;
+import org.apache.storm.jdbc.common.ConnectionProvider;
+import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
+import org.apache.storm.jdbc.common.JdbcClient;
+import org.apache.storm.jdbc.mapper.JdbcMapper;
+import org.apache.storm.jdbc.mapper.JdbcLookupMapper;
+import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
+import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
+import org.apache.storm.jdbc.spout.UserSpout;
+import org.apache.storm.LocalCluster;
+
+import java.sql.Types;
+import java.util.List;
+import java.util.Map;
+
+public abstract class AbstractUserTopology {
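+    // SQL run once at startup to (re)create the example tables and seed the
+    // department data that the lookup bolt joins against.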
+    private static final List<String> setupSqls = Lists.newArrayList(
+            "drop table if exists user",
+            "drop table if exists department",
+            "drop table if exists user_department",
+            "create table if not exists user (user_id integer, user_name varchar(100), dept_name varchar(100), create_date date)",
+            "create table if not exists department (dept_id integer, dept_name varchar(100))",
+            "create table if not exists user_department (user_id integer, dept_id integer)",
+            "insert into department values (1, 'R&D')",
+            "insert into department values (2, 'Finance')",
+            "insert into department values (3, 'HR')",
+            "insert into department values (4, 'Sales')",
+            "insert into user_department values (1, 1)",
+            "insert into user_department values (2, 2)",
+            "insert into user_department values (3, 3)",
+            "insert into user_department values (4, 4)"
+    );
+    protected UserSpout userSpout;
+    protected JdbcMapper jdbcMapper;
+    protected JdbcLookupMapper jdbcLookupMapper;
+    protected ConnectionProvider connectionProvider;
+
+    protected static final String TABLE_NAME = "user";
+    protected static final String JDBC_CONF = "jdbc.conf";
+    protected static final String SELECT_QUERY = "select dept_name from department, user_department where department.dept_id = user_department.dept_id" +
+            " and user_department.user_id = ?";
+
+    public void execute(String[] args) throws Exception {
+        if (args.length != 4 && args.length != 5) {
+            System.out.println("Usage: " + this.getClass().getSimpleName() + " <dataSourceClassName> <dataSource.url> "
+                    + "<user> <password> [topology name]");
+            System.exit(-1);
+        }
+        Map<String, Object> map = Maps.newHashMap();
+        map.put("dataSourceClassName", args[0]);//com.mysql.jdbc.jdbc2.optional.MysqlDataSource
+        map.put("dataSource.url", args[1]);//jdbc:mysql://localhost/test
+        map.put("dataSource.user", args[2]);//root
+
+        if (args.length >= 4) {
+            map.put("dataSource.password", args[3]);//password
+        }
+
+        Config config = new Config();
+        config.put(JDBC_CONF, map);
+
+        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
+        connectionProvider.prepare();
+        int queryTimeoutSecs = 60;
+        JdbcClient jdbcClient = new JdbcClient(connectionProvider, queryTimeoutSecs);
+        for (String sql : setupSqls) {
+            jdbcClient.executeSql(sql);
+        }
+
+        this.userSpout = new UserSpout();
+        this.jdbcMapper = new SimpleJdbcMapper(TABLE_NAME, connectionProvider);
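+        // SimpleJdbcMapper reads the table metadata when constructed, so the
+        // temporary connection provider can be cleaned up immediately afterwards.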
+        connectionProvider.cleanup();
+        Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
+        List<Column> queryParamColumns = Lists.newArrayList(new Column("user_id", Types.INTEGER));
+        this.jdbcLookupMapper = new SimpleJdbcLookupMapper(outputFields, queryParamColumns);
+        this.connectionProvider = new HikariCPConnectionProvider(map);
+        if (args.length == 4) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, getTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else {
+            StormSubmitter.submitTopology(args[4], config, getTopology());
+        }
+    }
+
+    public abstract StormTopology getTopology();
+
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
new file mode 100644
index 0000000..1915219
--- /dev/null
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTopology.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.jdbc.topology;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.topology.TopologyBuilder;
+import com.google.common.collect.Lists;
+import org.apache.storm.jdbc.bolt.JdbcInsertBolt;
+import org.apache.storm.jdbc.bolt.JdbcLookupBolt;
+import org.apache.storm.jdbc.common.Column;
+import org.apache.storm.jdbc.mapper.JdbcMapper;
+import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
+
+import java.sql.Types;
+import java.util.List;
+
+
+public class UserPersistanceTopology extends AbstractUserTopology {
+    private static final String USER_SPOUT = "USER_SPOUT";
+    private static final String LOOKUP_BOLT = "LOOKUP_BOLT";
+    private static final String PERSISTANCE_BOLT = "PERSISTANCE_BOLT";
+
+    public static void main(String[] args) throws Exception {
+        new UserPersistanceTopology().execute(args);
+    }
+
+    @Override
+    public StormTopology getTopology() {
+        JdbcLookupBolt departmentLookupBolt = new JdbcLookupBolt(connectionProvider, SELECT_QUERY, this.jdbcLookupMapper);
+
+        // Must specify the column schema when providing a custom insert query.
+        List<Column> schemaColumns = Lists.newArrayList(new Column("create_date", Types.DATE),
+                new Column("dept_name", Types.VARCHAR), new Column("user_id", Types.INTEGER), new Column("user_name", Types.VARCHAR));
+        JdbcMapper mapper = new SimpleJdbcMapper(schemaColumns);
+
+        JdbcInsertBolt userPersistanceBolt = new JdbcInsertBolt(connectionProvider, mapper)
+                .withInsertQuery("insert into user (create_date, dept_name, user_id, user_name) values (?,?,?,?)");
+
+        // userSpout ==> departmentLookupBolt ==> userPersistanceBolt
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(USER_SPOUT, this.userSpout, 1);
+        builder.setBolt(LOOKUP_BOLT, departmentLookupBolt, 1).shuffleGrouping(USER_SPOUT);
+        builder.setBolt(PERSISTANCE_BOLT, userPersistanceBolt, 1).shuffleGrouping(LOOKUP_BOLT);
+        return builder.createTopology();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
new file mode 100644
index 0000000..11269c3
--- /dev/null
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistanceTridentTopology.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.jdbc.topology;
+
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import com.google.common.collect.Lists;
+import org.apache.storm.jdbc.common.Column;
+import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
+import org.apache.storm.jdbc.spout.UserSpout;
+import org.apache.storm.jdbc.trident.state.JdbcQuery;
+import org.apache.storm.jdbc.trident.state.JdbcState;
+import org.apache.storm.jdbc.trident.state.JdbcStateFactory;
+import org.apache.storm.jdbc.trident.state.JdbcUpdater;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+
+import java.sql.Types;
+
+public class UserPersistanceTridentTopology extends AbstractUserTopology {
+
+    public static void main(String[] args) throws Exception {
+        new UserPersistanceTridentTopology().execute(args);
+    }
+
+    @Override
+    public StormTopology getTopology() {
+        TridentTopology topology = new TridentTopology();
+
+        JdbcState.Options options = new JdbcState.Options()
+                .withConnectionProvider(connectionProvider)
+                .withMapper(this.jdbcMapper)
+                .withJdbcLookupMapper(new SimpleJdbcLookupMapper(new Fields("dept_name"), Lists.newArrayList(new Column("user_id", Types.INTEGER))))
+                .withTableName(TABLE_NAME)
+                .withSelectQuery(SELECT_QUERY);
+
+        JdbcStateFactory jdbcStateFactory = new JdbcStateFactory(options);
+
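+        // Enrich each user tuple with dept_name via a JDBC state query, then
+        // persist the combined record through the same state factory.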
+        Stream stream = topology.newStream("userSpout", new UserSpout());
+        TridentState state = topology.newStaticState(jdbcStateFactory);
+        stream = stream.stateQuery(state, new Fields("user_id","user_name","create_date"), new JdbcQuery(), new Fields("dept_name"));
+        stream.partitionPersist(jdbcStateFactory, new Fields("user_id","user_name","dept_name","create_date"),  new JdbcUpdater(), new Fields());
+        return topology.build();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-kafka-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-kafka-examples/pom.xml b/examples/storm-kafka-examples/pom.xml
new file mode 100644
index 0000000..7133ad4
--- /dev/null
+++ b/examples/storm-kafka-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-kafka-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-kafka</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-kafka-examples/src/main/java/org/apache/storm/kafka/TridentKafkaTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-kafka-examples/src/main/java/org/apache/storm/kafka/TridentKafkaTopology.java b/examples/storm-kafka-examples/src/main/java/org/apache/storm/kafka/TridentKafkaTopology.java
new file mode 100644
index 0000000..fdc6752
--- /dev/null
+++ b/examples/storm-kafka-examples/src/main/java/org/apache/storm/kafka/TridentKafkaTopology.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.kafka;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.ImmutableMap;
+import org.apache.storm.kafka.trident.TridentKafkaStateFactory;
+import org.apache.storm.kafka.trident.TridentKafkaUpdater;
+import org.apache.storm.kafka.trident.mapper.FieldNameBasedTupleToKafkaMapper;
+import org.apache.storm.kafka.trident.selector.DefaultTopicSelector;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+
+import java.util.Properties;
+
+public class TridentKafkaTopology {
+
+    private static StormTopology buildTopology(String brokerConnectionString) {
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", "1"),
+                new Values("trident", "1"),
+                new Values("needs", "1"),
+                new Values("javadoc", "1")
+        );
+        spout.setCycle(true);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
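+        // Producer settings for the underlying KafkaProducer; acks=1 only waits
+        // for the partition leader, trading durability for latency.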
+        Properties props = new Properties();
+        props.put("bootstrap.servers", brokerConnectionString);
+        props.put("acks", "1");
+        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+
+        TridentKafkaStateFactory stateFactory = new TridentKafkaStateFactory()
+            .withProducerProperties(props)
+            .withKafkaTopicSelector(new DefaultTopicSelector("test"))
+            .withTridentTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper("word", "count"));
+        stream.partitionPersist(stateFactory, fields, new TridentKafkaUpdater(), new Fields());
+
+        return topology.build();
+    }
+
+    /**
+     * To run this topology, ensure you have a Kafka broker running and pass its connection string as the first argument.
+     * Create the "test" topic from the command line:
+     * kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
+     *
+     * Run this program, then start a Kafka console consumer:
+     * kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
+     *
+     * You should see the messages flowing through.
+     *
+     * @param args command line arguments; args[0] is the broker connection string
+     * @throws Exception
+     */
+    public static void main(String[] args) throws Exception {
+        if (args.length < 1) {
+            System.out.println("Please provide the Kafka broker url, e.g. localhost:9092");
+            System.exit(1);
+        }
+
+        LocalCluster cluster = new LocalCluster();
+        cluster.submitTopology("wordCounter", new Config(), buildTopology(args[0]));
+        Thread.sleep(60 * 1000);
+        cluster.killTopology("wordCounter");
+
+        cluster.shutdown();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/pom.xml b/examples/storm-mongodb-examples/pom.xml
new file mode 100644
index 0000000..6b952f7
--- /dev/null
+++ b/examples/storm-mongodb-examples/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>storm</artifactId>
+        <groupId>org.apache.storm</groupId>
+        <version>1.1.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>storm-mongodb-examples</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-mongodb</artifactId>
+            <version>${project.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/InsertWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/InsertWordCount.java b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/InsertWordCount.java
new file mode 100644
index 0000000..366acf4
--- /dev/null
+++ b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/InsertWordCount.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mongodb.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.mongodb.bolt.MongoInsertBolt;
+import org.apache.storm.mongodb.common.mapper.MongoMapper;
+import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class InsertWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String COUNT_BOLT = "COUNT_BOLT";
+    private static final String INSERT_BOLT = "INSERT_BOLT";
+
+    private static final String TEST_MONGODB_URL = "mongodb://127.0.0.1:27017/test";
+    private static final String TEST_MONGODB_COLLECTION_NAME = "wordcount";
+    
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        String url = TEST_MONGODB_URL;
+        String collectionName = TEST_MONGODB_COLLECTION_NAME;
+
+        if (args.length >= 2) {
+            url = args[0];
+            collectionName = args[1];
+        }
+
+        WordSpout spout = new WordSpout();
+        WordCounter bolt = new WordCounter();
+
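+        // Maps the "word" and "count" tuple fields to MongoDB document fields
+        // of the same names.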
+        MongoMapper mapper = new SimpleMongoMapper()
+                .withFields("word", "count");
+        
+        MongoInsertBolt insertBolt = new MongoInsertBolt(url, collectionName, mapper);
+
+        // wordSpout ==> countBolt ==> MongoInsertBolt
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(INSERT_BOLT, insertBolt, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
+
+
+        // With no arguments, fall back to the local test MongoDB defaults above.
+        if (args.length == 0 || args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: InsertWordCount <mongodb url> <mongodb collection> [topology name]");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/UpdateWordCount.java b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
new file mode 100644
index 0000000..7895f35
--- /dev/null
+++ b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/UpdateWordCount.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mongodb.topology;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.topology.TopologyBuilder;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.mongodb.bolt.MongoInsertBolt;
+import org.apache.storm.mongodb.bolt.MongoUpdateBolt;
+import org.apache.storm.mongodb.common.QueryFilterCreator;
+import org.apache.storm.mongodb.common.SimpleQueryFilterCreator;
+import org.apache.storm.mongodb.common.mapper.MongoMapper;
+import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
+import org.apache.storm.mongodb.common.mapper.SimpleMongoUpdateMapper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class UpdateWordCount {
+    private static final String WORD_SPOUT = "WORD_SPOUT";
+    private static final String COUNT_BOLT = "COUNT_BOLT";
+    private static final String UPDATE_BOLT = "UPDATE_BOLT";
+
+    private static final String TEST_MONGODB_URL = "mongodb://127.0.0.1:27017/test";
+    private static final String TEST_MONGODB_COLLECTION_NAME = "wordcount";
+    
+
+    public static void main(String[] args) throws Exception {
+        Config config = new Config();
+
+        String url = TEST_MONGODB_URL;
+        String collectionName = TEST_MONGODB_COLLECTION_NAME;
+
+        if (args.length >= 2) {
+            url = args[0];
+            collectionName = args[1];
+        }
+
+        WordSpout spout = new WordSpout();
+        WordCounter bolt = new WordCounter();
+
+        MongoMapper mapper = new SimpleMongoUpdateMapper()
+                .withFields("word", "count");
+
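+        // Builds the update filter from the "word" field so each word's document
+        // is matched and updated in place.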
+        QueryFilterCreator updateQueryCreator = new SimpleQueryFilterCreator()
+                .withField("word");
+        
+        MongoUpdateBolt updateBolt = new MongoUpdateBolt(url, collectionName, updateQueryCreator, mapper);
+
+        // Uncomment to insert a new document when no existing one matches the query filter.
+        //updateBolt.withUpsert(true);
+
+        // wordSpout ==> countBolt ==> MongoUpdateBolt
+        TopologyBuilder builder = new TopologyBuilder();
+
+        builder.setSpout(WORD_SPOUT, spout, 1);
+        builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(UPDATE_BOLT, updateBolt, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
+
+
+        // With no arguments, fall back to the local test MongoDB defaults above.
+        if (args.length == 0 || args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("test", config, builder.createTopology());
+            Thread.sleep(30000);
+            cluster.killTopology("test");
+            cluster.shutdown();
+            System.exit(0);
+        } else if (args.length == 3) {
+            StormSubmitter.submitTopology(args[2], config, builder.createTopology());
+        } else {
+            System.out.println("Usage: UpdateWordCount <mongodb url> <mongodb collection> [topology name]");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordCounter.java
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordCounter.java b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordCounter.java
new file mode 100644
index 0000000..efb2d89
--- /dev/null
+++ b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordCounter.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mongodb.topology;
+
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.IBasicBolt;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+import com.google.common.collect.Maps;
+
+import java.util.Map;
+
+import static org.apache.storm.utils.Utils.tuple;
+
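+/**
+ * Maintains an in-memory running count per word and emits an updated
+ * (word, count) pair for every input tuple; counts are per-task and are lost
+ * if the worker restarts.
+ */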
+public class WordCounter implements IBasicBolt {
+    private Map<String, Integer> wordCounter = Maps.newHashMap();
+
+    public void prepare(Map stormConf, TopologyContext context) {
+    }
+
+    public void execute(Tuple input, BasicOutputCollector collector) {
+        String word = input.getStringByField("word");
+        int count;
+        if (wordCounter.containsKey(word)) {
+            count = wordCounter.get(word) + 1;
+        } else {
+            count = 1;
+        }
+
+        wordCounter.put(word, count);
+        collector.emit(new Values(word, String.valueOf(count)));
+    }
+
+    public void cleanup() {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word", "count"));
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+}
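
The read-modify-write in execute() above can be collapsed on Java 8+ with
Map.merge, which stores 1 on the first occurrence and old + 1 afterwards; a
minimal equivalent sketch of the method:

    public void execute(Tuple input, BasicOutputCollector collector) {
        String word = input.getStringByField("word");
        // merge() returns the post-update count, so no containsKey/get dance is needed.
        int count = wordCounter.merge(word, 1, Integer::sum);
        collector.emit(new Values(word, String.valueOf(count)));
    }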

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java
new file mode 100644
index 0000000..885b1e8
--- /dev/null
+++ b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mongodb.topology;
+
+import org.apache.storm.spout.SpoutOutputCollector;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.IRichSpout;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+
+public class WordSpout implements IRichSpout {
+    boolean isDistributed;
+    SpoutOutputCollector collector;
+    private final Random rand = new Random();
+    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };
+
+    public WordSpout() {
+        this(true);
+    }
+
+    public WordSpout(boolean isDistributed) {
+        this.isDistributed = isDistributed;
+    }
+
+    public boolean isDistributed() {
+        return this.isDistributed;
+    }
+
+    @SuppressWarnings("rawtypes")
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        this.collector = collector;
+    }
+
+    public void close() {
+
+    }
+
+    public void nextTuple() {
+        final String word = words[rand.nextInt(words.length)];
+        this.collector.emit(new Values(word), UUID.randomUUID());
+        Thread.yield();
+    }
+
+    public void ack(Object msgId) {
+
+    }
+
+    public void fail(Object msgId) {
+
+    }
+
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        declarer.declare(new Fields("word"));
+    }
+
+    @Override
+    public void activate() {
+    }
+
+    @Override
+    public void deactivate() {
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+}
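
Because nextTuple() is called in a tight loop, the spout keeps a single Random
instance in a field rather than allocating one per call. On JDK 7+ the field
can be dropped entirely in favor of ThreadLocalRandom; a minimal sketch:

    import java.util.concurrent.ThreadLocalRandom;

    public void nextTuple() {
        // ThreadLocalRandom.current() is cheap and needs no per-spout state.
        final String word = words[ThreadLocalRandom.current().nextInt(words.length)];
        this.collector.emit(new Values(word), UUID.randomUUID());
        Thread.yield();
    }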

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/trident/WordCountTrident.java
----------------------------------------------------------------------
diff --git a/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/trident/WordCountTrident.java b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/trident/WordCountTrident.java
new file mode 100644
index 0000000..44447be
--- /dev/null
+++ b/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/trident/WordCountTrident.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.mongodb.trident;
+
+import org.apache.storm.Config;
+import org.apache.storm.LocalCluster;
+import org.apache.storm.StormSubmitter;
+import org.apache.storm.generated.StormTopology;
+import org.apache.storm.mongodb.common.mapper.MongoMapper;
+import org.apache.storm.mongodb.common.mapper.SimpleMongoMapper;
+import org.apache.storm.mongodb.trident.state.MongoState;
+import org.apache.storm.mongodb.trident.state.MongoStateFactory;
+import org.apache.storm.mongodb.trident.state.MongoStateUpdater;
+import org.apache.storm.trident.Stream;
+import org.apache.storm.trident.TridentState;
+import org.apache.storm.trident.TridentTopology;
+import org.apache.storm.trident.state.StateFactory;
+import org.apache.storm.trident.testing.FixedBatchSpout;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Values;
+
+public class WordCountTrident {
+
+    public static StormTopology buildTopology(String url, String collectionName){
+        Fields fields = new Fields("word", "count");
+        FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
+                new Values("storm", 1),
+                new Values("trident", 1),
+                new Values("needs", 1),
+                new Values("javadoc", 1)
+        );
+        spout.setCycle(true);
+
+        MongoMapper mapper = new SimpleMongoMapper()
+                .withFields("word", "count");
+
+        MongoState.Options options = new MongoState.Options()
+                .withUrl(url)
+                .withCollectionName(collectionName)
+                .withMapper(mapper);
+
+        StateFactory factory = new MongoStateFactory(options);
+
+        TridentTopology topology = new TridentTopology();
+        Stream stream = topology.newStream("spout1", spout);
+
+        stream.partitionPersist(factory, fields, new MongoStateUpdater(), new Fields());
+        return topology.build();
+    }
+
+    public static void main(String[] args) throws Exception {
+        Config conf = new Config();
+        conf.setMaxSpoutPending(5);
+        if (args.length == 2) {
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology("wordCounter", conf, buildTopology(args[0], args[1]));
+            Thread.sleep(60 * 1000);
+            cluster.killTopology("wordCounter");
+            cluster.shutdown();
+            System.exit(0);
+        }
+        else if (args.length == 3) {
+            conf.setNumWorkers(3);
+            StormSubmitter.submitTopology(args[2], conf, buildTopology(args[0], args[1]));
+        } else {
+            System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
+        }
+    }
+
+}
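
To sanity-check what the Trident state writes, the target collection can be
read back with the plain MongoDB 3.x Java driver; a sketch, with the same
placeholder URL, database, and collection names as above:

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientURI;
    import com.mongodb.client.MongoCollection;
    import org.bson.Document;

    public class ReadBackSketch {
        public static void main(String[] args) {
            MongoClient client = new MongoClient(new MongoClientURI("mongodb://127.0.0.1:27017"));
            MongoCollection<Document> coll = client.getDatabase("test").getCollection("wordcount");
            for (Document d : coll.find()) {
                System.out.println(d.toJson()); // e.g. { "word" : "storm", "count" : ... }
            }
            client.close();
        }
    }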

http://git-wip-us.apache.org/repos/asf/storm/blob/97fe209e/examples/storm-mqtt-examples/pom.xml
----------------------------------------------------------------------
diff --git a/examples/storm-mqtt-examples/pom.xml b/examples/storm-mqtt-examples/pom.xml
new file mode 100644
index 0000000..d6f3a91
--- /dev/null
+++ b/examples/storm-mqtt-examples/pom.xml
@@ -0,0 +1,115 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>storm-mqtt-examples</artifactId>
+  <packaging>jar</packaging>
+
+  <name>storm-mqtt-examples</name>
+
+  <parent>
+    <artifactId>storm</artifactId>
+    <groupId>org.apache.storm</groupId>
+    <version>1.1.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>storm-core</artifactId>
+      <version>${project.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>storm-mqtt</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>flux-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.fusesource.mqtt-client</groupId>
+      <artifactId>mqtt-client</artifactId>
+      <version>1.10</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.activemq</groupId>
+      <artifactId>activemq-broker</artifactId>
+      <version>5.9.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.activemq</groupId>
+      <artifactId>activemq-mqtt</artifactId>
+      <version>5.9.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.activemq</groupId>
+      <artifactId>activemq-kahadb-store</artifactId>
+      <version>5.9.0</version>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>1.4</version>
+        <configuration>
+          <createDependencyReducedPom>true</createDependencyReducedPom>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                  <mainClass>org.apache.storm.flux.Flux</mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
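
Since the shade plugin sets org.apache.storm.flux.Flux as the jar's Main-Class,
the shaded artifact is meant to be driven by Flux with a YAML topology
definition, along the lines of the following (the YAML file name is a
placeholder):

    storm jar target/storm-mqtt-examples-1.1.0-SNAPSHOT.jar org.apache.storm.flux.Flux --local my-topology.yaml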