You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by pe...@apache.org on 2017/08/20 15:03:01 UTC

[01/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Repository: beam
Updated Branches:
  refs/heads/jstorm-runner 0a05de365 -> e00e0e841


http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
new file mode 100644
index 0000000..548fb20
--- /dev/null
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
@@ -0,0 +1,302 @@
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.TestJStormRunner;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.testing.ValidatesRunner;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.transforms.join.CoGbkResult;
+import org.apache.beam.sdk.transforms.join.CoGroupByKey;
+import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
+import org.apache.beam.sdk.transforms.windowing.FixedWindows;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Duration;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Tests for {@link CoGroupByKey} with the JStorm runner.
+ */
+@RunWith(JUnit4.class)
+public class CoGroupByKeyTest implements Serializable {
+    /**
+     * Converts the given list into a PCollection belonging to the provided
+     * Pipeline in such a way that coder inference needs to be performed.
+     */
+    private PCollection<KV<Integer, String>> createInput(String name,
+                                                         Pipeline p, List<KV<Integer, String>> list) {
+        return createInput(name, p, list,  new ArrayList<Long>());
+    }
+
+    /**
+     * Converts the given list with timestamps into a PCollection.
+     */
+    private PCollection<KV<Integer, String>> createInput(String name,
+                                                         Pipeline p, List<KV<Integer, String>> list, List<Long> timestamps) {
+        PCollection<KV<Integer, String>> input;
+        if (timestamps.isEmpty()) {
+            input = p.apply("Create" + name, Create.of(list)
+                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
+        } else {
+            input = p.apply("Create" + name, Create.timestamped(list, timestamps)
+                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
+        }
+        // Pass the input through an identity ParDo so that downstream coder
+        // inference actually has to run, per the Javadoc above.
+        return input.apply(
+                "Identity" + name,
+                ParDo.of(
+                        new DoFn<KV<Integer, String>, KV<Integer, String>>() {
+                            @ProcessElement
+                            public void processElement(ProcessContext c) {
+                                c.output(c.element());
+                            }
+                        }));
+    }
+
+    /**
+     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the result
+     * of a {@link CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
+     * where each {@link PCollection} has no duplicate keys and the key sets of
+     * each {@link PCollection} are intersecting but neither is a subset of the other.
+     */
+    private PCollection<KV<Integer, CoGbkResult>> buildGetOnlyGbk(
+            Pipeline p,
+            TupleTag<String> tag1,
+            TupleTag<String> tag2) {
+        List<KV<Integer, String>> list1 =
+                Arrays.asList(
+                        KV.of(1, "collection1-1"),
+                        KV.of(2, "collection1-2"));
+        List<KV<Integer, String>> list2 =
+                Arrays.asList(
+                        KV.of(2, "collection2-2"),
+                        KV.of(3, "collection2-3"));
+        PCollection<KV<Integer, String>> collection1 = createInput("CreateList1", p, list1);
+        PCollection<KV<Integer, String>> collection2 = createInput("CreateList2", p, list2);
+        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
+                KeyedPCollectionTuple.of(tag1, collection1)
+                        .and(tag2, collection2)
+                        .apply(CoGroupByKey.<Integer>create());
+        return coGbkResults;
+    }
+
+    /** Verifies {@code getOnly()} lookups on a two-way {@link CoGroupByKey}. */
+    @Test
+    @Category(ValidatesRunner.class)
+    public void testCoGroupByKeyGetOnly() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        options.setLocalMode(true);
+
+        Pipeline p = Pipeline.create(options);
+
+        final TupleTag<String> tag1 = new TupleTag<>();
+        final TupleTag<String> tag2 = new TupleTag<>();
+
+        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
+                buildGetOnlyGbk(p, tag1, tag2);
+
+        PAssert.thatMap(coGbkResults).satisfies(
+                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
+                    @Override
+                    public Void apply(Map<Integer, CoGbkResult> results) {
+                        assertEquals("collection1-1", results.get(1).getOnly(tag1));
+                        assertEquals("collection1-2", results.get(2).getOnly(tag1));
+                        assertEquals("collection2-2", results.get(2).getOnly(tag2));
+                        assertEquals("collection2-3", results.get(3).getOnly(tag2));
+                        return null;
+                    }
+                });
+
+        p.run();
+    }
+
+    /**
+     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
+     * results of the {@code CoGroupByKey} over three
+     * {@code PCollection<KV<Integer, String>>}, each of which correlates
+     * a customer id to purchases, addresses, or names, respectively.
+     */
+    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbk(
+            Pipeline p,
+            TupleTag<String> purchasesTag,
+            TupleTag<String> addressesTag,
+            TupleTag<String> namesTag) {
+        List<KV<Integer, String>> idToPurchases =
+                Arrays.asList(
+                        KV.of(2, "Boat"),
+                        KV.of(1, "Shoes"),
+                        KV.of(3, "Car"),
+                        KV.of(1, "Book"),
+                        KV.of(10, "Pens"),
+                        KV.of(8, "House"),
+                        KV.of(4, "Suit"),
+                        KV.of(11, "House"),
+                        KV.of(14, "Shoes"),
+                        KV.of(2, "Suit"),
+                        KV.of(8, "Suit Case"),
+                        KV.of(3, "House"));
+
+        List<KV<Integer, String>> idToAddress =
+                Arrays.asList(
+                        KV.of(2, "53 S. 3rd"),
+                        KV.of(10, "383 Jackson Street"),
+                        KV.of(20, "3 W. Arizona"),
+                        KV.of(3, "29 School Rd"),
+                        KV.of(8, "6 Watling Rd"));
+
+        List<KV<Integer, String>> idToName =
+                Arrays.asList(
+                        KV.of(1, "John Smith"),
+                        KV.of(2, "Sally James"),
+                        KV.of(8, "Jeffery Spalding"),
+                        KV.of(20, "Joan Lichtfield"));
+
+        PCollection<KV<Integer, String>> purchasesTable =
+                createInput("CreateIdToPurchases", p, idToPurchases);
+
+        PCollection<KV<Integer, String>> addressTable =
+                createInput("CreateIdToAddress", p, idToAddress);
+
+        PCollection<KV<Integer, String>> nameTable =
+                createInput("CreateIdToName", p, idToName);
+
+        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
+                KeyedPCollectionTuple.of(namesTag, nameTable)
+                        .and(addressesTag, addressTable)
+                        .and(purchasesTag, purchasesTable)
+                        .apply(CoGroupByKey.<Integer>create());
+        return coGbkResults;
+    }
+
+    /**
+     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
+     * results of the {@code CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
+     * each of which correlates a customer id to clicks, purchases, respectively.
+     *
+     * <p>NOTE(review): no test in this file exercises this builder yet.
+     */
+    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbkWithWindowing(
+            Pipeline p,
+            TupleTag<String> clicksTag,
+            TupleTag<String> purchasesTag) {
+        List<KV<Integer, String>> idToClick =
+                Arrays.asList(
+                        KV.of(1, "Click t0"),
+                        KV.of(2, "Click t2"),
+                        KV.of(1, "Click t4"),
+                        KV.of(1, "Click t6"),
+                        KV.of(2, "Click t8"));
+
+        List<KV<Integer, String>> idToPurchases =
+                Arrays.asList(
+                        KV.of(1, "Boat t1"),
+                        KV.of(1, "Shoesi t2"),
+                        KV.of(1, "Pens t3"),
+                        KV.of(2, "House t4"),
+                        KV.of(2, "Suit t5"),
+                        KV.of(1, "Car t6"),
+                        KV.of(1, "Book t7"),
+                        KV.of(2, "House t8"),
+                        KV.of(2, "Shoes t9"),
+                        KV.of(2, "House t10"));
+
+        PCollection<KV<Integer, String>> clicksTable =
+                createInput("CreateClicks",
+                        p,
+                        idToClick,
+                        Arrays.asList(0L, 2L, 4L, 6L, 8L))
+                        .apply("WindowClicks", Window.<KV<Integer, String>>into(
+                                FixedWindows.of(new Duration(4)))
+                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
+
+        PCollection<KV<Integer, String>> purchasesTable =
+                createInput("CreatePurchases",
+                        p,
+                        idToPurchases,
+                        Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L))
+                        .apply("WindowPurchases", Window.<KV<Integer, String>>into(
+                                FixedWindows.of(new Duration(4)))
+                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
+
+        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
+                KeyedPCollectionTuple.of(clicksTag, clicksTable)
+                        .and(purchasesTag, purchasesTable)
+                        .apply(CoGroupByKey.<Integer>create());
+        return coGbkResults;
+    }
+
+    /** Verifies {@code getOnly()}/{@code getAll()} lookups on a three-way {@link CoGroupByKey}. */
+    @Test
+    @Category(ValidatesRunner.class)
+    public void testCoGroupByKey() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        options.setLocalMode(true);
+
+        Pipeline p = Pipeline.create(options);
+
+        final TupleTag<String> namesTag = new TupleTag<>();
+        final TupleTag<String> addressesTag = new TupleTag<>();
+        final TupleTag<String> purchasesTag = new TupleTag<>();
+
+
+        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
+                buildPurchasesCoGbk(p, purchasesTag, addressesTag, namesTag);
+
+        PAssert.thatMap(coGbkResults).satisfies(
+                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
+                    @Override
+                    public Void apply(Map<Integer, CoGbkResult> results) {
+                        CoGbkResult result1 = results.get(1);
+                        assertEquals("John Smith", result1.getOnly(namesTag));
+                        assertThat(result1.getAll(purchasesTag), containsInAnyOrder("Shoes", "Book"));
+
+                        CoGbkResult result2 = results.get(2);
+                        assertEquals("Sally James", result2.getOnly(namesTag));
+                        assertEquals("53 S. 3rd", result2.getOnly(addressesTag));
+                        assertThat(result2.getAll(purchasesTag), containsInAnyOrder("Suit", "Boat"));
+
+                        CoGbkResult result3 = results.get(3);
+                        assertEquals("29 School Rd", result3.getOnly(addressesTag));
+                        assertThat(result3.getAll(purchasesTag), containsInAnyOrder("Car", "House"));
+
+                        CoGbkResult result8 = results.get(8);
+                        assertEquals("Jeffery Spalding", result8.getOnly(namesTag));
+                        assertEquals("6 Watling Rd", result8.getOnly(addressesTag));
+                        assertThat(result8.getAll(purchasesTag), containsInAnyOrder("House", "Suit Case"));
+
+                        CoGbkResult result20 = results.get(20);
+                        assertEquals("Joan Lichtfield", result20.getOnly(namesTag));
+                        assertEquals("3 W. Arizona", result20.getOnly(addressesTag));
+
+                        assertEquals("383 Jackson Street", results.get(10).getOnly(addressesTag));
+
+                        assertThat(results.get(4).getAll(purchasesTag), containsInAnyOrder("Suit"));
+                        assertThat(results.get(10).getAll(purchasesTag), containsInAnyOrder("Pens"));
+                        assertThat(results.get(11).getAll(purchasesTag), containsInAnyOrder("House"));
+                        assertThat(results.get(14).getAll(purchasesTag), containsInAnyOrder("Shoes"));
+
+                        return null;
+                    }
+                });
+
+        p.run();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
new file mode 100644
index 0000000..3e21a89
--- /dev/null
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
@@ -0,0 +1,158 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+
+import org.apache.beam.runners.jstorm.StormRunner;
+import org.apache.beam.runners.jstorm.TestJStormRunner;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Tests for {@link GroupByKey} with {@link StormRunner}.
+ */
+@RunWith(JUnit4.class)
+public class GroupByKeyTest {
+
+    // Input words for the Count.globally() test: 13 elements, with repeats
+    // ("hi", "sue", "bob") and three empty strings.
+    static final String[] WORDS_ARRAY = new String[] {
+            "hi", "there", "hi", "hi", "sue", "bob",
+            "hi", "sue", "", "", "ZOW", "bob", "" };
+
+    static final List<String> WORDS = Arrays.asList(WORDS_ARRAY);
+
+    /**
+     * Verifies that {@link GroupByKey} groups every value under its key,
+     * including extreme ints (MAX_VALUE/MIN_VALUE), negatives, and zero.
+     */
+    @Test
+    public void testGroupByKey() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        options.setLocalMode(true);
+
+        Pipeline p = Pipeline.create(options);
+
+        List<KV<String, Integer>> ungroupedPairs = Arrays.asList(
+                KV.of("k1", 3),
+                KV.of("k5", Integer.MAX_VALUE),
+                KV.of("k5", Integer.MIN_VALUE),
+                KV.of("k2", 66),
+                KV.of("k1", 4),
+                KV.of("k2", -33),
+                KV.of("k3", 0));
+
+        PCollection<KV<String, Integer>> input =
+                p.apply(Create.of(ungroupedPairs)
+                        .withCoder(KvCoder.of(StringUtf8Coder.of(), BigEndianIntegerCoder.of())));
+
+        PCollection<KV<String, Iterable<Integer>>> output =
+                input.apply(GroupByKey.<String, Integer>create());
+
+        PAssert.that(output)
+                .satisfies(new AssertThatHasExpectedContentsForTestGroupByKey());
+
+        p.run();
+    }
+
+    /** Verifies {@code Count.globally()} over the 13-element word list above. */
+    @Test
+    public void testCountGloballyBasic() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        options.setLocalMode(true);
+
+        Pipeline p = Pipeline.create(options);
+        PCollection<String> input = p.apply(Create.of(WORDS));
+
+        PCollection<Long> output =
+                input.apply(Count.<String>globally());
+
+        PAssert.that(output)
+                .containsInAnyOrder(13L);
+        p.run();
+    }
+
+    /**
+     * Checks the grouped output of {@code testGroupByKey} without assuming any
+     * ordering of keys or of the values within a key.
+     */
+    static class AssertThatHasExpectedContentsForTestGroupByKey
+            implements SerializableFunction<Iterable<KV<String, Iterable<Integer>>>,
+            Void> {
+        @Override
+        public Void apply(Iterable<KV<String, Iterable<Integer>>> actual) {
+            assertThat(actual, containsInAnyOrder(
+                    KvMatcher.isKv(is("k1"), containsInAnyOrder(3, 4)),
+                    KvMatcher.isKv(is("k5"), containsInAnyOrder(Integer.MAX_VALUE,
+                            Integer.MIN_VALUE)),
+                    KvMatcher.isKv(is("k2"), containsInAnyOrder(66, -33)),
+                    KvMatcher.isKv(is("k3"), containsInAnyOrder(0))));
+            return null;
+        }
+    }
+
+    /**
+     * Matcher for KVs.
+     */
+    public static class KvMatcher<K, V>
+            extends TypeSafeMatcher<KV<? extends K, ? extends V>> {
+        final Matcher<? super K> keyMatcher;
+        final Matcher<? super V> valueMatcher;
+
+        public static <K, V> KvMatcher<K, V> isKv(Matcher<K> keyMatcher,
+                                                  Matcher<V> valueMatcher) {
+            return new KvMatcher<>(keyMatcher, valueMatcher);
+        }
+
+        public KvMatcher(Matcher<? super K> keyMatcher,
+                         Matcher<? super V> valueMatcher) {
+            this.keyMatcher = keyMatcher;
+            this.valueMatcher = valueMatcher;
+        }
+
+        // Matches only when both the key and the value satisfy their matchers.
+        @Override
+        public boolean matchesSafely(KV<? extends K, ? extends V> kv) {
+            return keyMatcher.matches(kv.getKey())
+                    && valueMatcher.matches(kv.getValue());
+        }
+
+        @Override
+        public void describeTo(Description description) {
+            description
+                    .appendText("a KV(").appendValue(keyMatcher)
+                    .appendText(", ").appendValue(valueMatcher)
+                    .appendText(")");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
new file mode 100644
index 0000000..f2d1896
--- /dev/null
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
@@ -0,0 +1,627 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+
+import org.apache.beam.runners.jstorm.StormRunner;
+import org.apache.beam.runners.jstorm.TestJStormRunner;
+import com.google.common.base.MoreObjects;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.*;
+import org.apache.beam.sdk.io.GenerateSequence;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.state.*;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.testing.UsesMapState;
+import org.apache.beam.sdk.testing.UsesStatefulParDo;
+import org.apache.beam.sdk.testing.ValidatesRunner;
+import org.apache.beam.sdk.transforms.*;
+import org.apache.beam.sdk.transforms.windowing.*;
+import org.apache.beam.sdk.values.*;
+import org.joda.time.Duration;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.*;
+
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Tests for {@link ParDo} with {@link StormRunner}.
+ */
+@RunWith(JUnit4.class)
+public class ParDoTest implements Serializable {
+
+    @Test
+    public void testParDo() throws IOException {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        List<Integer> inputs = Arrays.asList(3, -42, 666);
+
+        PCollection<String> output = pipeline
+                .apply(Create.of(inputs))
+                .apply(ParDo.of(new TestDoFn()));
+
+        PAssert.that(output)
+                .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
+
+        pipeline.run();
+    }
+
+    @Test
+    public void testParDoWithSideInputs() throws IOException {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        List<Integer> inputs = Arrays.asList(3, -42, 666);
+
+        PCollectionView<Integer> sideInput1 = pipeline
+                .apply("CreateSideInput1", Create.of(11))
+                .apply("ViewSideInput1", View.<Integer>asSingleton());
+        PCollectionView<Integer> sideInputUnread = pipeline
+                .apply("CreateSideInputUnread", Create.of(-3333))
+                .apply("ViewSideInputUnread", View.<Integer>asSingleton());
+
+        PCollectionView<Integer> sideInput2 = pipeline
+                .apply("CreateSideInput2", Create.of(222))
+                .apply("ViewSideInput2", View.<Integer>asSingleton());
+        PCollection<String> output = pipeline
+                .apply(Create.of(inputs))
+                .apply(ParDo.of(new TestDoFn(
+                                Arrays.asList(sideInput1, sideInput2),
+                                Arrays.<TupleTag<String>>asList()))
+                        .withSideInputs(sideInput1, sideInputUnread, sideInput2));
+
+        PAssert.that(output)
+                .satisfies(ParDoTest.HasExpectedOutput
+                        .forInput(inputs)
+                        .andSideInputs(11, 222));
+
+        pipeline.run();
+    }
+
+    @Test
+    public void testParDoWithTaggedOutput() {
+        List<Integer> inputs = Arrays.asList(3, -42, 666);
+
+        TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
+        TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
+        TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
+        TupleTag<String> additionalOutputTag3 = new TupleTag<String>("additional3"){};
+        TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
+
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        PCollectionTuple outputs = pipeline
+            .apply(Create.of(inputs))
+            .apply(ParDo
+                .of(new TestDoFn(
+                    Arrays.<PCollectionView<Integer>>asList(),
+                    Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
+                .withOutputTags(
+                    mainOutputTag,
+                    TupleTagList.of(additionalOutputTag3)
+                        .and(additionalOutputTag1)
+                        .and(additionalOutputTagUnwritten)
+                        .and(additionalOutputTag2)));
+
+        PAssert.that(outputs.get(mainOutputTag))
+            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
+
+        PAssert.that(outputs.get(additionalOutputTag1))
+            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
+                .fromOutput(additionalOutputTag1));
+        PAssert.that(outputs.get(additionalOutputTag2))
+            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
+                .fromOutput(additionalOutputTag2));
+        PAssert.that(outputs.get(additionalOutputTag3))
+            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
+                .fromOutput(additionalOutputTag3));
+        PAssert.that(outputs.get(additionalOutputTagUnwritten)).empty();
+
+        pipeline.run();
+    }
+
+    @Test
+    public void testNoWindowFnDoesNotReassignWindows() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        final PCollection<Long> initialWindows =
+                pipeline
+                    .apply(GenerateSequence.from(0).to(10))
+                    .apply("AssignWindows", Window.into(new WindowOddEvenBuckets()));
+
+        // Sanity check the window assignment to demonstrate the baseline
+        PAssert.that(initialWindows)
+                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
+                .containsInAnyOrder(0L, 2L, 4L, 6L, 8L);
+        PAssert.that(initialWindows)
+                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
+                .containsInAnyOrder(1L, 3L, 5L, 7L, 9L);
+
+        PCollection<Boolean> upOne =
+                initialWindows.apply(
+                        "ModifyTypes",
+                        MapElements.<Long, Boolean>via(
+                                new SimpleFunction<Long, Boolean>() {
+                                    @Override
+                                    public Boolean apply(Long input) {
+                                        return input % 2 == 0;
+                                    }
+                                }));
+        PAssert.that(upOne)
+                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
+                .containsInAnyOrder(true, true, true, true, true);
+        PAssert.that(upOne)
+                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
+                .containsInAnyOrder(false, false, false, false, false);
+
+        // The elements should be in the same windows, even though they would not be assigned to the
+        // same windows with the updated timestamps. If we try to apply the original WindowFn, the type
+        // will not be appropriate and the runner should crash, as a Boolean cannot be converted into
+        // a long.
+        PCollection<Boolean> updatedTrigger =
+                upOne.apply(
+                        "UpdateWindowingStrategy",
+                        Window.<Boolean>configure().triggering(Never.ever())
+                                .withAllowedLateness(Duration.ZERO)
+                                .accumulatingFiredPanes());
+        pipeline.run();
+    }
+
+    @Test
+    @Category({ValidatesRunner.class, UsesStatefulParDo.class})
+    public void testValueStateSameId() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        final String stateId = "foo";
+
+        // First stateful DoFn: emits a per-key running count (0, 1, 2, ...)
+        // under the new key "sizzle", then increments the stored counter.
+        DoFn<KV<String, Integer>, KV<String, Integer>> fn =
+                new DoFn<KV<String, Integer>, KV<String, Integer>>() {
+
+                    @StateId(stateId)
+                    private final StateSpec<ValueState<Integer>> intState =
+                            StateSpecs.value(VarIntCoder.of());
+
+                    @ProcessElement
+                    public void processElement(
+                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
+                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 0);
+                        c.output(KV.of("sizzle", currentValue));
+                        state.write(currentValue + 1);
+                    }
+                };
+
+        // Second stateful DoFn: declares the SAME state id but counts in steps
+        // of 13. If state were shared across transforms the counters would
+        // collide; the assertions below verify each ParDo gets its own cell.
+        DoFn<KV<String, Integer>, Integer> fn2 =
+                new DoFn<KV<String, Integer>, Integer>() {
+
+                    @StateId(stateId)
+                    private final StateSpec<ValueState<Integer>> intState =
+                            StateSpecs.value(VarIntCoder.of());
+
+                    @ProcessElement
+                    public void processElement(
+                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
+                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 13);
+                        c.output(currentValue);
+                        state.write(currentValue + 13);
+                    }
+                };
+
+        PCollection<KV<String, Integer>> intermediate =
+                pipeline.apply(Create.of(KV.of("hello", 42), KV.of("hello", 97), KV.of("hello", 84)))
+                        .apply("First stateful ParDo", ParDo.of(fn));
+
+        PCollection<Integer> output =
+                intermediate.apply("Second stateful ParDo", ParDo.of(fn2));
+
+        // Three "hello" elements -> counts 0, 1, 2 keyed "sizzle"; the second
+        // ParDo then sees three "sizzle" elements -> 13, 26, 39.
+        PAssert.that(intermediate)
+                .containsInAnyOrder(KV.of("sizzle", 0), KV.of("sizzle", 1), KV.of("sizzle", 2));
+        PAssert.that(output).containsInAnyOrder(13, 26, 39);
+        pipeline.run();
+    }
+
+    @Test
+    @Category({ValidatesRunner.class, UsesStatefulParDo.class})
+    public void testValueStateTaggedOutput() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        final String stateId = "foo";
+
+        final TupleTag<Integer> evenTag = new TupleTag<Integer>() {};
+        final TupleTag<Integer> oddTag = new TupleTag<Integer>() {};
+
+        // Keeps a per-key counter in value state: even counts go to the main
+        // (even) output, odd counts to the tagged odd output, then increments.
+        DoFn<KV<String, Integer>, Integer> fn =
+                new DoFn<KV<String, Integer>, Integer>() {
+
+                    @StateId(stateId)
+                    private final StateSpec<ValueState<Integer>> counterSpec =
+                            StateSpecs.value(VarIntCoder.of());
+
+                    @ProcessElement
+                    public void processElement(
+                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
+                        Integer counter = MoreObjects.firstNonNull(state.read(), 0);
+                        if (counter % 2 != 0) {
+                            c.output(oddTag, counter);
+                        } else {
+                            c.output(counter);
+                        }
+                        state.write(counter + 1);
+                    }
+                };
+
+        PCollectionTuple output =
+                pipeline.apply(
+                        Create.of(
+                                KV.of("hello", 42),
+                                KV.of("hello", 97),
+                                KV.of("hello", 84),
+                                KV.of("goodbye", 33),
+                                KV.of("hello", 859),
+                                KV.of("goodbye", 83945)))
+                        .apply(ParDo.of(fn).withOutputTags(evenTag, TupleTagList.of(oddTag)));
+
+        // "hello" appears four times (counter 0..3); "goodbye" twice (0..1).
+        // Evens: 0 and 2 from "hello", 0 from "goodbye".
+        PAssert.that(output.get(evenTag)).containsInAnyOrder(0, 2, 0);
+        // Odds: 1 and 3 from "hello", 1 from "goodbye".
+        PAssert.that(output.get(oddTag)).containsInAnyOrder(1, 3, 1);
+        pipeline.run();
+    }
+
+    @Test
+    @Category({ValidatesRunner.class, UsesStatefulParDo.class, UsesMapState.class})
+    public void testMapStateCoderInference() {
+        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
+        options.setRunner(TestJStormRunner.class);
+        Pipeline pipeline = Pipeline.create(options);
+
+        final String stateId = "foo";
+        final String countStateId = "count";
+        // Register a coder for MyInteger in the pipeline's registry. The
+        // MapState below is declared via StateSpecs.map() with no explicit
+        // coders, so the runner must infer the value coder from this registry
+        // — that inference is what this test exercises.
+        Coder<MyInteger> myIntegerCoder = MyIntegerCoder.of();
+        pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);
+
+        DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>> fn =
+                new DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>>() {
+
+                    // Map state with no declared coders; key/value coders are
+                    // inferred (String from the element type, MyInteger from
+                    // the registered coder above).
+                    @StateId(stateId)
+                    private final StateSpec<MapState<String, MyInteger>> mapState = StateSpecs.map();
+
+                    // Running element count per key.
+                    // NOTE(review): combiningFromInputInternal is a Beam-internal
+                    // API — confirm it is still available in the targeted SDK.
+                    @StateId(countStateId)
+                    private final StateSpec<CombiningState<Integer, int[], Integer>>
+                            countState = StateSpecs.combiningFromInputInternal(VarIntCoder.of(),
+                            Sum.ofIntegers());
+
+                    @ProcessElement
+                    public void processElement(
+                            ProcessContext c, @StateId(stateId) MapState<String, MyInteger> state,
+                            @StateId(countStateId) CombiningState<Integer, int[], Integer>
+                                    count) {
+                        // Store each inner KV into the map (later puts with the
+                        // same inner key overwrite earlier ones), and once the
+                        // 4th element for this key arrives, dump the whole map.
+                        KV<String, Integer> value = c.element().getValue();
+                        state.put(value.getKey(), new MyInteger(value.getValue()));
+                        count.add(1);
+                        if (count.read() >= 4) {
+                            Iterable<Map.Entry<String, MyInteger>> iterate = state.entries().read();
+                            for (Map.Entry<String, MyInteger> entry : iterate) {
+                                c.output(KV.of(entry.getKey(), entry.getValue()));
+                            }
+                        }
+                    }
+                };
+
+        PCollection<KV<String, MyInteger>> output =
+                pipeline.apply(
+                        Create.of(
+                                KV.of("hello", KV.of("a", 97)), KV.of("hello", KV.of("b", 42)),
+                                KV.of("hello", KV.of("b", 42)), KV.of("hello", KV.of("c", 12))))
+                        .apply(ParDo.of(fn)).setCoder(KvCoder.of(StringUtf8Coder.of(), myIntegerCoder));
+
+        // The duplicate ("b", 42) collapses in the map, leaving one entry each
+        // for "a", "b" and "c".
+        PAssert.that(output).containsInAnyOrder(KV.of("a", new MyInteger(97)),
+                KV.of("b", new MyInteger(42)), KV.of("c", new MyInteger(12)));
+        pipeline.run();
+    }
+
+
+    private static class WindowOddEvenBuckets extends NonMergingWindowFn<Long, IntervalWindow> {
+        private static final IntervalWindow EVEN_WINDOW =
+                new IntervalWindow(
+                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp());
+        private static final IntervalWindow ODD_WINDOW =
+                new IntervalWindow(
+                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp().minus(1));
+
+        @Override
+        public Collection<IntervalWindow> assignWindows(AssignContext c) throws Exception {
+            if (c.element() % 2 == 0) {
+                return Collections.singleton(EVEN_WINDOW);
+            }
+            return Collections.singleton(ODD_WINDOW);
+        }
+
+        @Override
+        public boolean isCompatible(WindowFn<?, ?> other) {
+            return other instanceof WindowOddEvenBuckets;
+        }
+
+        @Override
+        public Coder<IntervalWindow> windowCoder() {
+            return new IntervalWindow.IntervalWindowCoder();
+        }
+
+        @Override
+        public WindowMappingFn<IntervalWindow> getDefaultWindowMappingFn() {
+            throw new UnsupportedOperationException(
+                    String.format("Can't use %s for side inputs", getClass().getSimpleName()));
+        }
+    }
+
+
+    /**
+     * DoFn that verifies the Setup/StartBundle/ProcessElement/FinishBundle
+     * lifecycle ordering with an explicit state machine, while echoing each
+     * element (decorated with any side-input values) to the main output and
+     * to every configured additional output.
+     */
+    static class TestDoFn extends DoFn<Integer, String> {
+        // Lifecycle states; each callback below asserts the allowed
+        // predecessor states before advancing.
+        enum State {NOT_SET_UP, UNSTARTED, STARTED, PROCESSING, FINISHED}
+
+        State state = State.NOT_SET_UP;
+
+        // Side-input views to read in processElement, and tags for the
+        // additional outputs every result is mirrored to.
+        final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
+        final List<TupleTag<String>> additionalOutputTupleTags = new ArrayList<>();
+
+        public TestDoFn() {
+        }
+
+        public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
+                        List<TupleTag<String>> additionalOutputTupleTags) {
+            this.sideInputViews.addAll(sideInputViews);
+            this.additionalOutputTupleTags.addAll(additionalOutputTupleTags);
+        }
+
+        // @Setup must run exactly once, before anything else.
+        @Setup
+        public void prepare() {
+            assertEquals(State.NOT_SET_UP, state);
+            state = State.UNSTARTED;
+        }
+
+        // A bundle may start right after setup or after a previous bundle
+        // finished — never mid-bundle.
+        @StartBundle
+        public void startBundle() {
+            assertThat(state,
+                anyOf(equalTo(State.UNSTARTED), equalTo(State.FINISHED)));
+
+            state = State.STARTED;
+        }
+
+        @ProcessElement
+        public void processElement(ProcessContext c) {
+            // NOTE(review): debug print left in — consider removing or routing
+            // through a logger.
+            System.out.println("Recv elem: " + c.element());
+            assertThat(state,
+                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
+            state = State.PROCESSING;
+            outputToAllWithSideInputs(c, "processing: " + c.element());
+        }
+
+        // Emits a "finished" marker to the main output and each additional
+        // output so tests can assert finishBundle ran.
+        @FinishBundle
+        public void finishBundle(FinishBundleContext c) {
+            assertThat(state,
+                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
+            state = State.FINISHED;
+            c.output("finished", BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE);
+            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
+                c.output(
+                    additionalOutputTupleTag,
+                    additionalOutputTupleTag.getId() + ": " + "finished",
+                    BoundedWindow.TIMESTAMP_MIN_VALUE,
+                    GlobalWindow.INSTANCE);
+            }
+        }
+
+        // Appends side-input values (if any) to the message, then writes it to
+        // the main output and, prefixed by the tag id, to each extra output.
+        private void outputToAllWithSideInputs(ProcessContext c, String value) {
+            if (!sideInputViews.isEmpty()) {
+                List<Integer> sideInputValues = new ArrayList<>();
+                for (PCollectionView<Integer> sideInputView : sideInputViews) {
+                    sideInputValues.add(c.sideInput(sideInputView));
+                }
+                value += ": " + sideInputValues;
+            }
+            c.output(value);
+            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
+                c.output(additionalOutputTupleTag,
+                    additionalOutputTupleTag.getId() + ": " + value);
+            }
+        }
+    }
+
+    /** Immutable int wrapper used to exercise coder inference for a custom type. */
+    private static class MyInteger implements Comparable<MyInteger> {
+        private final int value;
+
+        MyInteger(int value) {
+            this.value = value;
+        }
+
+        public int getValue() {
+            return value;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            // Equal iff the other object is a MyInteger wrapping the same int.
+            return this == o
+                || (o instanceof MyInteger && value == ((MyInteger) o).value);
+        }
+
+        @Override
+        public int hashCode() {
+            // The wrapped int is its own hash, mirroring Integer.hashCode.
+            return value;
+        }
+
+        @Override
+        public int compareTo(MyInteger o) {
+            return Integer.compare(value, o.value);
+        }
+
+        @Override
+        public String toString() {
+            return "MyInteger{" + "value=" + value + '}';
+        }
+    }
+
+    /**
+     * Coder for {@link MyInteger} that delegates to {@link VarIntCoder} on the
+     * wrapped int. Singleton; obtain via {@link #of()}.
+     */
+    private static class MyIntegerCoder extends AtomicCoder<MyInteger> {
+        private static final MyIntegerCoder INSTANCE = new MyIntegerCoder();
+
+        private final VarIntCoder delegate = VarIntCoder.of();
+
+        // Private constructor enforces the singleton: all access goes
+        // through of().
+        private MyIntegerCoder() {
+        }
+
+        public static MyIntegerCoder of() {
+            return INSTANCE;
+        }
+
+        @Override
+        public void encode(MyInteger value, OutputStream outStream)
+                throws CoderException, IOException {
+            delegate.encode(value.getValue(), outStream);
+        }
+
+        @Override
+        public MyInteger decode(InputStream inStream) throws CoderException,
+                IOException {
+            return new MyInteger(delegate.decode(inStream));
+        }
+    }
+
+    /** PAssert "matcher" for expected output. */
+    static class HasExpectedOutput
+        implements SerializableFunction<Iterable<String>, Void>, Serializable {
+        private final List<Integer> inputs;
+        private final List<Integer> sideInputs;
+        private final String additionalOutput;
+        private final boolean ordered;
+
+        public static HasExpectedOutput forInput(List<Integer> inputs) {
+            return new HasExpectedOutput(
+                new ArrayList<Integer>(inputs),
+                new ArrayList<Integer>(),
+                "",
+                false);
+        }
+
+        private HasExpectedOutput(List<Integer> inputs,
+                                  List<Integer> sideInputs,
+                                  String additionalOutput,
+                                  boolean ordered) {
+            this.inputs = inputs;
+            this.sideInputs = sideInputs;
+            this.additionalOutput = additionalOutput;
+            this.ordered = ordered;
+        }
+
+        public HasExpectedOutput andSideInputs(Integer... sideInputValues) {
+            List<Integer> sideInputs = new ArrayList<>();
+            for (Integer sideInputValue : sideInputValues) {
+                sideInputs.add(sideInputValue);
+            }
+            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, ordered);
+        }
+
+        public HasExpectedOutput fromOutput(TupleTag<String> outputTag) {
+            return fromOutput(outputTag.getId());
+        }
+        public HasExpectedOutput fromOutput(String outputId) {
+            return new HasExpectedOutput(inputs, sideInputs, outputId, ordered);
+        }
+
+        public HasExpectedOutput inOrder() {
+            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, true);
+        }
+
+        @Override
+        public Void apply(Iterable<String> outputs) {
+            List<String> processeds = new ArrayList<>();
+            List<String> finisheds = new ArrayList<>();
+            for (String output : outputs) {
+                if (output.contains("finished")) {
+                    finisheds.add(output);
+                } else {
+                    processeds.add(output);
+                }
+            }
+
+            String sideInputsSuffix;
+            if (sideInputs.isEmpty()) {
+                sideInputsSuffix = "";
+            } else {
+                sideInputsSuffix = ": " + sideInputs;
+            }
+
+            String additionalOutputPrefix;
+            if (additionalOutput.isEmpty()) {
+                additionalOutputPrefix = "";
+            } else {
+                additionalOutputPrefix = additionalOutput + ": ";
+            }
+
+            List<String> expectedProcesseds = new ArrayList<>();
+            for (Integer input : inputs) {
+                expectedProcesseds.add(
+                    additionalOutputPrefix + "processing: " + input + sideInputsSuffix);
+            }
+            String[] expectedProcessedsArray =
+                expectedProcesseds.toArray(new String[expectedProcesseds.size()]);
+            if (!ordered || expectedProcesseds.isEmpty()) {
+                assertThat(processeds, containsInAnyOrder(expectedProcessedsArray));
+            } else {
+                assertThat(processeds, contains(expectedProcessedsArray));
+            }
+
+            for (String finished : finisheds) {
+                assertEquals(additionalOutputPrefix + "finished", finished);
+            }
+
+            return null;
+        }
+    }
+}


[19/53] [abbrv] beam git commit: jstorm-runner: remove AdaptorBasicBolt and AdaptorBasicSpout.

Posted by pe...@apache.org.
jstorm-runner: remove AdaptorBasicBolt and AdaptorBasicSpout.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/9309ac49
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/9309ac49
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/9309ac49

Branch: refs/heads/jstorm-runner
Commit: 9309ac49d81e1d6dfd694ec885cdb12a3db53483
Parents: 5a15d54
Author: Pei He <pe...@apache.org>
Authored: Fri Jul 14 14:50:47 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       | 16 +++---------
 .../jstorm/translation/TranslationContext.java  | 12 ++++-----
 .../translation/runtime/AdaptorBasicBolt.java   | 27 --------------------
 .../translation/runtime/AdaptorBasicSpout.java  | 27 --------------------
 .../translation/runtime/ExecutorsBolt.java      |  3 ++-
 .../runtime/UnboundedSourceSpout.java           |  3 ++-
 6 files changed, 14 insertions(+), 74 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 00ec7f6..8782130 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -41,8 +41,6 @@ import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSeria
 import org.apache.beam.runners.jstorm.translation.JStormPipelineTranslator;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.AbstractComponent;
-import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicBolt;
-import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicSpout;
 import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
 import org.apache.beam.runners.jstorm.translation.runtime.TxExecutorsBolt;
 import org.apache.beam.runners.jstorm.translation.runtime.TxUnboundedSourceSpout;
@@ -155,18 +153,12 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
 
   private AbstractComponent getComponent(
       String id, TranslationContext.ExecutionGraphContext context) {
-    AbstractComponent component = null;
-    AdaptorBasicSpout spout = context.getSpout(id);
+    AbstractComponent spout = context.getSpout(id);
     if (spout != null) {
-      component = spout;
+      return spout;
     } else {
-      AdaptorBasicBolt bolt = context.getBolt(id);
-      if (bolt != null) {
-        component = bolt;
-      }
+      return context.getBolt(id);
     }
-
-    return component;
   }
 
   private StormTopology getTopology(
@@ -176,7 +168,7 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
         isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
 
     int parallelismNumber = options.getParallelismNumber();
-    Map<String, AdaptorBasicSpout> spouts = context.getSpouts();
+    Map<String, UnboundedSourceSpout> spouts = context.getSpouts();
     for (String id : spouts.keySet()) {
       IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
       builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));

http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 1230a31..28d102d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -34,9 +34,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicSpout;
 import org.apache.beam.runners.jstorm.translation.runtime.Executor;
 import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
+import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
 import org.apache.beam.runners.jstorm.translation.translator.Stream;
 import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
 import org.apache.beam.runners.jstorm.util.RunnerUtils;
@@ -333,7 +333,7 @@ public class TranslationContext {
    */
   public static class ExecutionGraphContext {
 
-    private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();
+    private final Map<String, UnboundedSourceSpout> spoutMap = new HashMap<>();
     private final Map<String, ExecutorsBolt> boltMap = new HashMap<>();
 
     // One-to-one mapping between Stream.Producer and TaggedPValue (or PValue).
@@ -344,7 +344,7 @@ public class TranslationContext {
 
     private int id = 1;
 
-    public void registerSpout(AdaptorBasicSpout spout, TaggedPValue output) {
+    public void registerSpout(UnboundedSourceSpout spout, TaggedPValue output) {
       checkNotNull(spout, "spout");
       checkNotNull(output, "output");
       String name = "spout" + genId();
@@ -354,14 +354,14 @@ public class TranslationContext {
           Stream.Producer.of(name, output.getTag().getId(), output.getValue().getName()));
     }
 
-    public AdaptorBasicSpout getSpout(String id) {
+    public UnboundedSourceSpout getSpout(String id) {
       if (Strings.isNullOrEmpty(id)) {
         return null;
       }
       return this.spoutMap.get(id);
     }
 
-    public Map<String, AdaptorBasicSpout> getSpouts() {
+    public Map<String, UnboundedSourceSpout> getSpouts() {
       return this.spoutMap;
     }
 
@@ -418,7 +418,7 @@ public class TranslationContext {
     public String toString() {
       List<String> ret = new ArrayList<>();
       ret.add("SPOUT");
-      for (Map.Entry<String, AdaptorBasicSpout> entry : spoutMap.entrySet()) {
+      for (Map.Entry<String, UnboundedSourceSpout> entry : spoutMap.entrySet()) {
         ret.add(entry.getKey() + ": " + entry.getValue().toString());
       }
       ret.add("BOLT");

http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
deleted file mode 100644
index d8d4d46..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.topology.IRichBatchBolt;
-
-/**
- * Adaptor bolt of JStorm extends {@link AbstractComponent}.
- */
-public abstract class AdaptorBasicBolt extends AbstractComponent implements IRichBatchBolt {
-
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
deleted file mode 100644
index 814d416..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.topology.IRichSpout;
-
-/**
- * Adaptor bolt of JStorm extends {@link AbstractComponent}.
- */
-public abstract class AdaptorBasicSpout extends AbstractComponent implements IRichSpout {
-
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
index d33c17a..0366c13 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
@@ -21,6 +21,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
 
 import backtype.storm.task.OutputCollector;
 import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBatchBolt;
 import backtype.storm.tuple.ITupleExt;
 import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
@@ -52,7 +53,7 @@ import org.slf4j.LoggerFactory;
 /**
  * ExecutorsBolt is a JStorm Bolt composited with several executors chained in a sub-DAG.
  */
-public class ExecutorsBolt extends AdaptorBasicBolt {
+public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
   private static final long serialVersionUID = -7751043327801735211L;
 
   private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/9309ac49/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
index 006cd47..690824d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
@@ -21,6 +21,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
 
 import backtype.storm.spout.SpoutOutputCollector;
 import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichSpout;
 import backtype.storm.tuple.Values;
 import com.alibaba.jstorm.utils.KryoSerializer;
 import java.io.IOException;
@@ -43,7 +44,7 @@ import org.slf4j.LoggerFactory;
  * Spout implementation that wraps a Beam UnboundedSource.
  * TODO: add wrapper to support metrics in UnboundedSource.
  */
-public class UnboundedSourceSpout extends AdaptorBasicSpout {
+public class UnboundedSourceSpout extends AbstractComponent implements IRichSpout {
   private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
 
   private final String description;


[07/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/StatefulDoFnExecutor.java
deleted file mode 100644
index 889977b..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/StatefulDoFnExecutor.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.translation.runtime.state.JStormStateInternals;
-import com.alibaba.jstorm.beam.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-public class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
-    public StatefulDoFnExecutor(
-            String stepName, String description, StormPipelineOptions pipelineOptions,
-            DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
-                    sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
-                mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-    }
-
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        if (mainInputTag.equals(tag)) {
-            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-                    executorContext.getExecutorsBolt().timerService()));
-            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
-    }
-
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        stepContext.setStateInternals(new JStormStateInternals<>(key,
-                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        super.onTimer(key, timerData);
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerService.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerService.java
deleted file mode 100644
index 60d2f1a..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerService.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import org.apache.beam.runners.core.TimerInternals;
-import org.joda.time.Instant;
-
-import java.io.Serializable;
-import java.util.List;
-
-/**
- * Interface that tracks input watermarks and manages timers in each bolt.
- */
-public interface TimerService extends Serializable {
-
-    void init(List<Integer> upStreamTasks);
-
-    /**
-     *
-     * @param task
-     * @param inputWatermark
-     * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
-     */
-    long updateInputWatermark(Integer task, long inputWatermark);
-
-    long currentInputWatermark();
-
-    long currentOutputWatermark();
-
-    void clearWatermarkHold(String namespace);
-
-    void addWatermarkHold(String namespace, Instant watermarkHold);
-
-    void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
-
-    void fireTimers(long newWatermark);
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerServiceImpl.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerServiceImpl.java
deleted file mode 100644
index 1411428..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TimerServiceImpl.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import avro.shaded.com.google.common.collect.Maps;
-import avro.shaded.com.google.common.collect.Sets;
-import com.alibaba.jstorm.utils.Pair;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.joda.time.Instant;
-
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-
-/**
- * Default implementation of {@link TimerService}.
- */
-public class TimerServiceImpl implements TimerService {
-    private transient ExecutorContext executorContext;
-    private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
-
-    private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark = new ConcurrentHashMap<>();
-    private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
-    private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
-    private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
-    private transient final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue = new PriorityQueue<>();
-    private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
-            timerDataToKeyedExecutors = Maps.newHashMap();
-
-    private boolean initialized = false;
-
-    public TimerServiceImpl() {
-    }
-
-    public TimerServiceImpl(ExecutorContext executorContext) {
-        this.executorContext = executorContext;
-        this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
-    }
-
-    @Override
-    public void init(List<Integer> upStreamTasks) {
-        for (Integer task : upStreamTasks) {
-            upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-            inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-        }
-        initialized = true;
-    }
-
-    @Override
-    public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
-        checkState(initialized, "TimerService has not been initialized.");
-        Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
-        // Make sure the input watermark don't go backward.
-        if (taskInputWatermark > oldTaskInputWatermark) {
-            upStreamTaskToInputWatermark.put(task, taskInputWatermark);
-            inputWatermarks.add(taskInputWatermark);
-            inputWatermarks.remove(oldTaskInputWatermark);
-
-            long newLocalInputWatermark = currentInputWatermark();
-            if (newLocalInputWatermark > oldTaskInputWatermark) {
-                return newLocalInputWatermark;
-            }
-        }
-        return 0;
-    }
-
-    @Override
-    public void fireTimers(long newWatermark) {
-        TimerInternals.TimerData timerData;
-        while ((timerData = eventTimeTimersQueue.peek()) != null
-                && timerData.getTimestamp().getMillis() <= newWatermark) {
-            for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
-                DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
-                executor.onTimer(keyedExecutor.getSecond(), timerData);
-            }
-            eventTimeTimersQueue.remove();
-            timerDataToKeyedExecutors.remove(timerData);
-        }
-    }
-
-    @Override
-    public long currentInputWatermark() {
-        return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
-    }
-
-    @Override
-    public long currentOutputWatermark() {
-        if (watermarkHolds.isEmpty()) {
-            return currentInputWatermark();
-        } else {
-            return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
-        }
-    }
-
-    @Override
-    public void clearWatermarkHold(String namespace) {
-        Instant currentHold = namespaceToWatermarkHold.get(namespace);
-        if (currentHold != null) {
-            watermarkHolds.remove(currentHold);
-            namespaceToWatermarkHold.remove(namespace);
-        }
-    }
-
-    @Override
-    public void addWatermarkHold(String namespace, Instant watermarkHold) {
-        Instant currentHold = namespaceToWatermarkHold.get(namespace);
-        if (currentHold == null) {
-            namespaceToWatermarkHold.put(namespace, watermarkHold);
-            watermarkHolds.add(watermarkHold);
-        } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
-            namespaceToWatermarkHold.put(namespace, watermarkHold);
-            watermarkHolds.add(watermarkHold);
-            watermarkHolds.remove(currentHold);
-        }
-    }
-
-    @Override
-    public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
-        checkArgument(
-                TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
-                String.format("Does not support domain: %s.", timerData.getDomain()));
-        Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
-        if (keyedExecutors == null) {
-            keyedExecutors = Sets.newHashSet();
-            eventTimeTimersQueue.add(timerData);
-        }
-        keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
-        timerDataToKeyedExecutors.put(timerData, keyedExecutors);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxExecutorsBolt.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxExecutorsBolt.java
deleted file mode 100644
index 5049d72..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxExecutorsBolt.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.transactional.bolt.ITransactionStatefulBoltExecutor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
-    private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
-
-    private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
-    private static final String TIMER_SERVICE_KET = "timer_service_key";
-
-    private ExecutorsBolt executorsBolt;
-    private IKvStoreManager kvStoreManager;
-    private IKvStore<String, TimerService> timerServiceStore;
-
-    public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
-        this.executorsBolt = executorsBolt;
-        this.executorsBolt.setStatefulBolt(true);
-    }
-
-    @Override
-    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-        try {
-            executorsBolt.prepare(stormConf, context, collector);
-            kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
-            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-        } catch (IOException e) {
-            LOG.error("Failed to prepare stateful bolt", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void execute(Tuple input) {
-        executorsBolt.execute(input);
-    }
-
-    @Override
-    public void cleanup() {
-        executorsBolt.cleanup();
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        executorsBolt.declareOutputFields(declarer);
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return executorsBolt.getComponentConfiguration();
-    }
-
-    @Override
-    public void initState(Object userState) {
-        LOG.info("Begin to init from state: {}", userState);
-        restore(userState);
-    }
-
-    @Override
-    public Object finishBatch(long batchId) {
-        try {
-            timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
-        } catch (IOException e) {
-            LOG.error("Failed to store current timer service status", e);
-            throw new RuntimeException(e.getMessage());
-        }
-        kvStoreManager.checkpoint(batchId);
-        return null;
-    }
-
-    @Override
-    public Object commit(long batchId, Object state) {
-        return kvStoreManager.backup(batchId);
-    }
-
-    @Override
-    public void rollBack(Object userState) {
-        LOG.info("Begin to rollback from state: {}", userState);
-        restore(userState);
-    }
-
-    @Override
-    public void ackCommit(long batchId, long timeStamp) {
-        kvStoreManager.remove(batchId);
-    }
-
-    private void restore(Object userState) {
-        try {
-            // restore all states
-            kvStoreManager.restore(userState);
-
-            // init timer service
-            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-            TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
-            if (timerService == null) {
-                timerService = executorsBolt.initTimerService();
-            }
-            executorsBolt.setTimerService(timerService);
-        } catch (IOException e) {
-            LOG.error("Failed to restore state", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxUnboundedSourceSpout.java
deleted file mode 100644
index 65ce814..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/TxUnboundedSourceSpout.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.transactional.spout.ITransactionSpoutExecutor;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
-    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
-
-    private static final String SOURCE_STORE_ID = "SourceCheckpoint";
-    private static final String CHECKPOINT_MARK = "CheckpointMark";
-
-    private UnboundedSourceSpout sourceSpout;
-    private UnboundedSource.UnboundedReader reader;
-    private IKvStoreManager kvStoreManager;
-    private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
-
-    public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
-        this.sourceSpout = sourceSpout;
-    }
-
-    private void restore(Object userState) {
-        try {
-            kvStoreManager.restore(userState);
-            sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
-            UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
-            sourceSpout.createSourceReader(checkpointMark);
-            reader = sourceSpout.getUnboundedSourceReader();
-        } catch (IOException e) {
-            LOG.error("Failed to init state", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void initState(Object userState) {
-        restore(userState);
-    }
-
-    @Override
-    public Object finishBatch(long checkpointId) {
-        try {
-            // Store check point mark from unbounded source reader
-            UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
-            sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
-
-            // checkpoint all kv stores in current manager
-            kvStoreManager.checkpoint(checkpointId);
-        } catch (IOException e) {
-            LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
-            throw new RuntimeException(e.getMessage());
-        }
-        return null;
-    }
-
-    @Override
-    public Object commit(long batchId, Object state) {
-        // backup kv stores to remote state backend
-        return kvStoreManager.backup(batchId);
-    }
-
-    @Override
-    public void rollBack(Object userState) {
-        restore(userState);
-    }
-
-    @Override
-    public void ackCommit(long batchId, long timeStamp) {
-        // remove obsolete state in bolt local and remote state backend
-        kvStoreManager.remove(batchId);
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        sourceSpout.declareOutputFields(declarer);
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return sourceSpout.getComponentConfiguration();
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        try {
-            sourceSpout.open(conf, context, collector);
-            String storeName = String.format("task-%s", context.getThisTaskId());
-            String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-            kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, storePath, true);
-
-            reader = sourceSpout.getUnboundedSourceReader();
-        } catch (IOException e) {
-            LOG.error("Failed to open transactional unbounded source spout", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void close() {
-        sourceSpout.close();
-    }
-
-    @Override
-    public void activate() {
-        sourceSpout.activate();
-    }
-
-    @Override
-    public void deactivate() {
-        sourceSpout.deactivate();
-    }
-
-    @Override
-    public void nextTuple() {
-        sourceSpout.nextTuple();
-    }
-
-    @Override
-    public void ack(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void fail(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/UnboundedSourceSpout.java
deleted file mode 100644
index 7a4b269..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/UnboundedSourceSpout.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import com.alibaba.jstorm.beam.translation.util.CommonInstance;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.cluster.Common;
-import com.alibaba.jstorm.utils.KryoSerializer;
-import com.alibaba.jstorm.window.Watermark;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.TupleTag;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.util.SerializedPipelineOptions;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Values;
-
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Spout implementation that wraps a Beam UnboundedSource
- *
- * TODO: add wrapper to support metrics in UnboundedSource.
- */
-public class UnboundedSourceSpout extends AdaptorBasicSpout {
-    private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
-
-    private final String description;
-    private final UnboundedSource source;
-    private final SerializedPipelineOptions serializedOptions;
-    private final TupleTag<?> outputTag;
-
-    private transient StormPipelineOptions pipelineOptions;
-    private transient UnboundedSource.UnboundedReader reader;
-    private transient SpoutOutputCollector collector;
-
-    private volatile boolean hasNextRecord;
-    private AtomicBoolean activated = new AtomicBoolean();
-
-    private KryoSerializer<WindowedValue> serializer;
-
-    private long lastWaterMark = 0l;
-
-    public UnboundedSourceSpout(
-            String description,
-            UnboundedSource source,
-            StormPipelineOptions options,
-            TupleTag<?> outputTag) {
-        this.description = checkNotNull(description, "description");
-        this.source = checkNotNull(source, "source");
-        this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
-        this.outputTag = checkNotNull(outputTag, "outputTag");
-    }
-
-    @Override
-    public synchronized void close() {
-        try {
-            activated.set(false);
-            this.reader.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public void activate() {
-        activated.set(true);
-        
-    }
-
-    @Override
-    public void deactivate() {
-        activated.set(false);
-    }
-
-    @Override
-    public void ack(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void fail(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        try {
-            this.collector = collector;
-            this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(StormPipelineOptions.class);
-
-            createSourceReader(null);
-
-            this.serializer = new KryoSerializer<>(conf);
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to create unbounded reader.", e);
-        }
-    }
-
-    public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
-        if (reader != null) {
-            reader.close();
-        }
-        reader = this.source.createReader(this.pipelineOptions, checkpointMark);
-        hasNextRecord = this.reader.start();
-    }
-
-    @Override
-    public synchronized void nextTuple() {
-        if (!activated.get()) {
-            return;
-        }
-        try {
-            if (!hasNextRecord) {
-                hasNextRecord = reader.advance();
-            }
-
-            while (hasNextRecord && activated.get()) {
-                Object value = reader.getCurrent();
-                Instant timestamp = reader.getCurrentTimestamp();
-
-                WindowedValue wv = WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
-                LOG.debug("Source output: " + wv.getValue());
-                if (keyedEmit(outputTag.getId())) {
-                    KV kv = (KV) wv.getValue();
-                    // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-                    byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
-                    collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
-                } else {
-                    byte[] immutableValue = serializer.serialize(wv);
-                    collector.emit(outputTag.getId(), new Values(immutableValue));
-                }
-
-                // move to next record
-                hasNextRecord = reader.advance();
-            }
-
-            Instant waterMark = reader.getWatermark();
-            if (waterMark != null && lastWaterMark <  waterMark.getMillis()) {
-                lastWaterMark = waterMark.getMillis();
-                collector.flush();
-                collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
-                LOG.debug("Source output: WM-{}", waterMark.toDateTime());
-            }
-        } catch (IOException e) {
-            throw new RuntimeException("Exception reading values from source.", e);
-        }
-    }
-
-    public UnboundedSource getUnboundedSource() {
-        return source;
-    }
-
-    public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
-        return reader;
-    }
-
-    @Override
-    public String toString() {
-        return description;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ViewExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ViewExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ViewExecutor.java
deleted file mode 100644
index a297b50..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ViewExecutor.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * JStorm {@link Executor} for {@link View}.
- */
-public class ViewExecutor implements Executor {
-
-    private final String description;
-    private final TupleTag outputTag;
-    private ExecutorsBolt executorsBolt;
-
-    public ViewExecutor(String description, TupleTag outputTag) {
-        this.description = description;
-        this.outputTag = outputTag;
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.executorsBolt = context.getExecutorsBolt();
-    }
-
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        executorsBolt.processExecutorElem(outputTag, elem);
-    }
-
-    @Override
-    public void cleanup() {}
-
-    @Override
-    public String toString() {
-        return description;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/WindowAssignExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/WindowAssignExecutor.java
deleted file mode 100644
index b26c3e5..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/WindowAssignExecutor.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import com.google.common.collect.Iterables;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collection;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
-    private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
-
-    private final String description;
-    private WindowFn<T, W> windowFn;
-    private ExecutorsBolt executorsBolt;
-    private TupleTag outputTag;
-
-    class JStormAssignContext<InputT, W extends BoundedWindow>
-            extends WindowFn<InputT, W>.AssignContext {
-        private final WindowedValue<InputT> value;
-
-        JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
-            fn.super();
-            checkArgument(
-                    Iterables.size(value.getWindows()) == 1,
-                    String.format(
-                            "%s passed to window assignment must be in a single window, but it was in %s: %s",
-                            WindowedValue.class.getSimpleName(),
-                            Iterables.size(value.getWindows()),
-                            value.getWindows()));
-            this.value = value;
-        }
-
-        @Override
-        public InputT element() {
-            return value.getValue();
-        }
-
-        @Override
-        public Instant timestamp() {
-            return value.getTimestamp();
-        }
-
-        @Override
-        public BoundedWindow window() {
-            return Iterables.getOnlyElement(value.getWindows());
-        }
-    }
-
-    public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
-        this.description = description;
-        this.windowFn = windowFn;
-        this.outputTag = outputTag;
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.executorsBolt = context.getExecutorsBolt();
-    }
-
-    @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        Collection<W> windows = null;
-        try {
-            windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
-            for (W window: windows) {
-                executorsBolt.processExecutorElem(
-                        outputTag,
-                        WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
-            }
-        } catch (Exception e) {
-            LOG.warn("Failed to assign windows for elem=" + elem, e);
-        }
-    }
-
-    @Override
-    public void cleanup() {}
-
-
-    @Override
-    public String toString() {
-        return description;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormBagState.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormBagState.java
deleted file mode 100644
index 7f77a78..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormBagState.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.annotation.Nullable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * JStorm implementation of {@link BagState}.
- */
-class JStormBagState<K, T> implements BagState<T> {
-    private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
-
-    @Nullable
-    private final K key;
-    private final StateNamespace namespace;
-    private final IKvStore<ComposedKey, T> kvState;
-    private final IKvStore<ComposedKey, Object> stateInfoKvState;
-    private int elemIndex;
-
-    public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
-                           IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
-        this.key = key;
-        this.namespace = checkNotNull(namespace, "namespace");
-        this.kvState = checkNotNull(kvState, "kvState");
-        this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
-
-        Integer index = (Integer) stateInfoKvState.get(getComposedKey());
-        this.elemIndex =  index != null ? ++index : 0;
-    }
-
-    @Override
-    public void add(T input) {
-        try {
-            kvState.put(getComposedKey(elemIndex), input);
-            stateInfoKvState.put(getComposedKey(), elemIndex);
-            elemIndex++;
-        } catch (IOException e) {
-            throw new RuntimeException(e.getCause());
-        }
-    }
-
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return new ReadableState<Boolean>() {
-            @Override
-            public Boolean read() {
-                return elemIndex <= 0;
-            }
-
-            @Override
-            public ReadableState<Boolean> readLater() {
-                // TODO: support prefetch.
-                return this;
-            }
-        };
-    }
-
-    @Override
-    public Iterable<T> read() {
-        return new BagStateIterable(elemIndex);
-    }
-
-    @Override
-    public BagState readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
-
-    @Override
-    public void clear() {
-        try {
-            for (int i = 0; i < elemIndex; i++) {
-                kvState.remove(getComposedKey(i));
-            }
-            stateInfoKvState.remove(getComposedKey());
-            elemIndex = 0;
-        } catch (IOException e) {
-            throw new RuntimeException(e.getCause());
-        }
-    }
-
-    private ComposedKey getComposedKey() {
-        return ComposedKey.of(key, namespace);
-    }
-
-    private ComposedKey getComposedKey(int elemIndex) {
-        return ComposedKey.of(key, namespace, elemIndex);
-    }
-
-    private class BagStateIterable implements KvStoreIterable<T> {
-
-        private class BagStateIterator implements Iterator<T> {
-            private final int size;
-            private int cursor = 0;
-
-            BagStateIterator() {
-                Integer s = null;
-                try {
-                    s = (Integer) stateInfoKvState.get(getComposedKey());
-                } catch (IOException e) {
-                    LOG.error("Failed to get elemIndex for key={}", getComposedKey());
-                }
-                this.size = s != null ? ++s : 0;
-            }
-
-            @Override
-            public boolean hasNext() {
-                return cursor < size;
-            }
-
-            @Override
-            public T next() {
-                if (cursor >= size) {
-                    throw new NoSuchElementException();
-                }
-
-                T value = null;
-                try {
-                    value = kvState.get(getComposedKey(cursor));
-                } catch (IOException e) {
-                    LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
-                }
-                cursor++;
-                return value;
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException();
-            }
-        }
-
-        private final int size;
-
-        BagStateIterable(int size) {
-            this.size = size;
-        }
-
-        @Override
-        public Iterator<T> iterator() {
-            return new BagStateIterator();
-        }
-
-        @Override
-        public String toString() {
-            return String.format("BagStateIterable: composedKey=%s", getComposedKey());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormCombiningState.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormCombiningState.java
deleted file mode 100644
index 496dc5e..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormCombiningState.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import javax.annotation.Nullable;
-
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.transforms.Combine;
-
-/**
- * JStorm implementation of {@link CombiningState}.
- */
-public class JStormCombiningState<InputT, AccumT, OutputT>
-        implements CombiningState<InputT, AccumT, OutputT> {
-
-    @Nullable
-    private final BagState<AccumT> accumBagState;
-    private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
-    JStormCombiningState(
-            BagState<AccumT> accumBagState,
-            Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-        this.accumBagState = checkNotNull(accumBagState, "accumBagState");
-        this.combineFn = checkNotNull(combineFn, "combineFn");
-    }
-
-    @Override
-    public AccumT getAccum() {
-        // TODO: replacing the accumBagState with the merged accum.
-        return combineFn.mergeAccumulators(accumBagState.read());
-    }
-
-    @Override
-    public void addAccum(AccumT accumT) {
-        accumBagState.add(accumT);
-    }
-
-    @Override
-    public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
-        return combineFn.mergeAccumulators(iterable);
-    }
-
-    @Override
-    public void add(InputT input) {
-        accumBagState.add(
-                combineFn.addInput(combineFn.createAccumulator(), input));
-    }
-
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return accumBagState.isEmpty();
-    }
-
-    @Override
-    public OutputT read() {
-        return combineFn.extractOutput(
-            combineFn.mergeAccumulators(accumBagState.read()));
-    }
-
-    @Override
-    public CombiningState<InputT, AccumT, OutputT> readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
-
-    @Override
-    public void clear() {
-        accumBagState.clear();
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormMapState.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormMapState.java
deleted file mode 100644
index 5d5bbbf..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormMapState.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import com.alibaba.jstorm.cache.IKvStore;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-public class JStormMapState<K, V> implements MapState<K, V> {
-    private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
-
-    private final K key;
-    private final StateNamespace namespace;
-    private IKvStore<K, V> kvStore;
-
-    public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
-        this.key = key;
-        this.namespace = namespace;
-        this.kvStore = kvStore;
-    }
-
-    @Override
-    public void put(K var1, V var2) {
-        try {
-            kvStore.put(var1, var2);
-        } catch (IOException e) {
-            reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
-        }
-    }
-
-    @Override
-    public ReadableState<V> putIfAbsent(K var1, V var2) {
-        ReadableState<V> ret = null;
-        try {
-            V value = kvStore.get(var1);
-            if (value == null) {
-                kvStore.put(var1, var2);
-                ret = new MapReadableState<>(null);
-            } else {
-                ret = new MapReadableState<>(value);
-            }
-        } catch (IOException e) {
-            reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public void remove(K var1) {
-        try {
-            kvStore.remove(var1);
-        } catch (IOException e) {
-            reportError(String.format("Failed to remove key=%s", var1), e);
-        }
-    }
-
-    @Override
-    public ReadableState<V> get(K var1) {
-        ReadableState<V> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState(kvStore.get(var1));
-        } catch (IOException e) {
-            reportError(String.format("Failed to get value for key=%s", var1), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public ReadableState<Iterable<K>> keys() {
-        ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.keys());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get keys"), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public ReadableState<Iterable<V>> values() {
-        ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.values());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get values"), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
-        ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.entries());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get values"), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public void clear() {
-        try {
-            Iterable<K> keys = kvStore.keys();
-            kvStore.removeBatch(keys);
-        } catch (IOException e) {
-            reportError(String.format("Failed to clear map state"), e);
-        }
-    }
-
-    private void reportError(String errorInfo, IOException e) {
-        LOG.error(errorInfo, e);
-        throw new RuntimeException(errorInfo);
-    }
-
-    private class MapReadableState<T> implements ReadableState<T> {
-        private T value;
-
-        public MapReadableState(T value) {
-            this.value = value;
-        }
-
-        @Override
-        public T read() {
-            return value;
-        }
-
-        @Override
-        public ReadableState<T> readLater() {
-            return this;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternals.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternals.java
deleted file mode 100644
index 4a202cf..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternals.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import com.alibaba.jstorm.beam.translation.runtime.TimerService;
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateTag;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.SetState;
-import org.apache.beam.sdk.state.State;
-import org.apache.beam.sdk.state.StateBinder;
-import org.apache.beam.sdk.state.StateContext;
-import org.apache.beam.sdk.state.StateSpec;
-import org.apache.beam.sdk.state.ValueState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn;
-import org.apache.beam.sdk.transforms.CombineWithContext;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-
-import javax.annotation.Nullable;
-import java.io.IOException;
-import java.util.List;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * JStorm implementation of {@link StateInternals}.
- */
-public class JStormStateInternals<K> implements StateInternals {
-
-    private static final String STATE_INFO = "state-info:";
-
-    @Nullable
-    private final K key;
-    private final IKvStoreManager kvStoreManager;
-    private final TimerService timerService;
-    private final int executorId;
-
-    public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
-                                TimerService timerService, int executorId) {
-        this.key = key;
-        this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
-        this.timerService = checkNotNull(timerService, "timerService");
-        this.executorId = executorId;
-    }
-
-    @Nullable
-    @Override
-    public K getKey() {
-        return key;
-    }
-
-    @Override
-    public <T extends State> T state(
-        StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
-        // throw new UnsupportedOperationException("StateContext is not supported.");
-        /**
-         * TODO:
-         * Same implementation as state() which is without StateContext. This might be updated after
-         * we figure out if we really need StateContext for JStorm state internals.
-         */
-        return state(namespace, address);
-    }
-
-    @Override
-    public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
-        return address.getSpec().bind(address.getId(), new StateBinder() {
-            @Override
-            public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
-                try {
-                    return new JStormValueState<>(
-                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-
-            @Override
-            public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
-                try {
-                    return new JStormBagState(
-                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-
-            @Override
-            public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
-                String id,
-                StateSpec<MapState<KeyT, ValueT>> spec,
-                Coder<KeyT> mapKeyCoder,
-                Coder<ValueT> mapValueCoder) {
-                try {
-                    return new JStormMapState<>(getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-
-            @Override
-            public <InputT, AccumT, OutputT> CombiningState bindCombining(
-                    String id,
-                    StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
-                    Coder<AccumT> accumCoder,
-                    Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-                try {
-                    BagState<AccumT> accumBagState = new JStormBagState(
-                            getKey(), namespace,
-                            kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-                    return new JStormCombiningState<>(accumBagState, combineFn);
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-
-
-            @Override
-            public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
-            bindCombiningWithContext(
-                String id,
-                StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
-                CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public WatermarkHoldState bindWatermark(
-                String id,
-                StateSpec<WatermarkHoldState> spec,
-                final TimestampCombiner timestampCombiner) {
-                try {
-                    BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
-                            getKey(), namespace,
-                            kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-
-                    Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
-                            new BinaryCombineFn<Instant>() {
-                                @Override
-                                public Instant apply(Instant left, Instant right) {
-                                  return timestampCombiner.combine(left, right);
-                                }};
-                    return new JStormWatermarkHoldState(
-                            namespace,
-                            new JStormCombiningState<>(
-                                    accumBagState,
-                                    outputTimeCombineFn),
-                            timestampCombiner,
-                            timerService);
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-        });
-    }
-
-    private String getStoreId(String stateId) {
-        return String.format("%s-%s", stateId, executorId);
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormValueState.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormValueState.java
deleted file mode 100644
index 3d5c68b..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormValueState.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.ValueState;
-
-import javax.annotation.Nullable;
-import java.io.IOException;
-
-/**
- * JStorm implementation of {@link ValueState}.
- */
-public class JStormValueState<K, T> implements ValueState<T> {
-
-    @Nullable
-    private final K key;
-    private final StateNamespace namespace;
-    private final IKvStore<ComposedKey, T> kvState;
-
-    JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
-        this.key = key;
-        this.namespace = namespace;
-        this.kvState = kvState;
-    }
-
-    @Override
-    public void write(T t) {
-        try {
-            kvState.put(getComposedKey(), t);
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
-        }
-    }
-
-    @Override
-    public T read() {
-        try {
-            return kvState.get(getComposedKey());
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to read key: %s, namespace: %s.", key, namespace));
-        }
-    }
-
-    @Override
-    public ValueState<T> readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
-
-    @Override
-    public void clear() {
-        try {
-            kvState.remove(getComposedKey());
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to clear key: %s, namespace: %s.", key, namespace));
-        }
-    }
-
-    private ComposedKey getComposedKey() {
-        return ComposedKey.of(key, namespace);
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormWatermarkHoldState.java
deleted file mode 100644
index 7888d85..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormWatermarkHoldState.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.alibaba.jstorm.beam.translation.runtime.TimerService;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.GroupingState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-
-/**
- * JStorm implementation of {@link WatermarkHoldState}.
- */
-public class JStormWatermarkHoldState implements WatermarkHoldState {
-
-    private final StateNamespace namespace;
-    private final GroupingState<Instant, Instant> watermarkHoldsState;
-    private final TimestampCombiner timestampCombiner;
-    private final TimerService timerService;
-
-    JStormWatermarkHoldState(
-            StateNamespace namespace,
-            GroupingState<Instant, Instant> watermarkHoldsState,
-            TimestampCombiner timestampCombiner,
-            TimerService timerService) {
-        this.namespace = checkNotNull(namespace, "namespace");
-        this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
-        this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
-        this.timerService = checkNotNull(timerService, "timerService");
-    }
-
-    @Override
-    public TimestampCombiner getTimestampCombiner() {
-        return timestampCombiner;
-    }
-
-    @Override
-    public void add(Instant instant) {
-        timerService.addWatermarkHold(namespace.stringKey(), instant);
-        watermarkHoldsState.add(instant);
-    }
-
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return watermarkHoldsState.isEmpty();
-    }
-
-    @Override
-    public Instant read() {
-        return watermarkHoldsState.read();
-    }
-
-    @Override
-    public WatermarkHoldState readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
-
-    @Override
-    public void clear() {
-        timerService.clearWatermarkHold(namespace.stringKey());
-        watermarkHoldsState.clear();
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/timer/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/timer/JStormTimerInternals.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/timer/JStormTimerInternals.java
deleted file mode 100644
index 9aba566..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/timer/JStormTimerInternals.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.timer;
-
-import com.alibaba.jstorm.beam.translation.runtime.DoFnExecutor;
-import com.alibaba.jstorm.beam.translation.runtime.TimerService;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.joda.time.Instant;
-
-import javax.annotation.Nullable;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * JStorm implementation of {@link TimerInternals}.
- */
-public class JStormTimerInternals<K> implements TimerInternals {
-
-    private final K key;
-    private final DoFnExecutor<?, ?> doFnExecutor;
-    private final TimerService timerService;
-
-
-    public JStormTimerInternals(@Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
-        this.key = key;
-        this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
-        this.timerService = checkNotNull(timerService, "timerService");
-    }
-
-    @Override
-    public void setTimer(StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
-        setTimer(TimerData.of(timerId, namespace, target, timeDomain));
-    }
-
-    @Override
-    @Deprecated
-    public void setTimer(TimerData timerData) {
-        timerService.setTimer(key, timerData, doFnExecutor);
-    }
-
-    @Override
-    public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    @Deprecated
-    public void deleteTimer(StateNamespace namespace, String timerId) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    @Deprecated
-    public void deleteTimer(TimerData timerData) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    public Instant currentProcessingTime() {
-        return Instant.now();
-    }
-
-    @Override
-    @Nullable
-    public Instant currentSynchronizedProcessingTime() {
-        return null;
-    }
-
-    @Override
-    public Instant currentInputWatermarkTime() {
-        return new Instant(timerService.currentInputWatermark());
-    }
-
-    @Override
-    @Nullable
-    public Instant currentOutputWatermarkTime() {
-        return new Instant(timerService.currentOutputWatermark());
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/BoundedSourceTranslator.java
deleted file mode 100644
index fc494ac..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/BoundedSourceTranslator.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.UnboundedSourceSpout;
-
-import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Translates a {@link Read.Bounded} into a Storm spout.
- *
- * @param <T>
- */
-public class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
-
-    @Override
-    public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-        TupleTag<?> outputTag = userGraphContext.getOutputTag();
-        PValue outputValue = userGraphContext.getOutput();
-        UnboundedSourceSpout spout = new UnboundedSourceSpout(
-                description,
-                new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
-                userGraphContext.getOptions(), outputTag);
-
-        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(outputTag, outputValue));
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombineGloballyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombineGloballyTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombineGloballyTranslator.java
deleted file mode 100644
index 03bcaff..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombineGloballyTranslator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import org.apache.beam.sdk.transforms.Combine;
-
-public class CombineGloballyTranslator<InputT, OutputT> extends TransformTranslator.Default<Combine.Globally<InputT, OutputT>> {
-    
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombinePerKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombinePerKeyTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombinePerKeyTranslator.java
deleted file mode 100644
index d37bd01..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/CombinePerKeyTranslator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import org.apache.beam.sdk.transforms.Combine;
-
-public class CombinePerKeyTranslator<K, InputT, OutputT> extends TransformTranslator.Default<Combine.PerKey<K, InputT, OutputT>> {
-    
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/FlattenTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/FlattenTranslator.java
deleted file mode 100644
index 6eba601..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/FlattenTranslator.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.google.common.collect.Maps;
-import org.apache.beam.sdk.transforms.Flatten;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.FlattenExecutor;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import java.util.Map;
-
-public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
-
-    @Override
-    public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-
-        // Since a new tag is created in PCollectionList, retrieve the real tag here.
-        Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
-        for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
-            PCollection<V> pc = (PCollection<V>) entry.getValue();
-            inputs.putAll(pc.expand());
-        }
-        System.out.println("Real inputs: " + inputs);
-        System.out.println("FlattenList inputs: " + userGraphContext.getInputs());
-        String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
-        FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
-        context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTranslator.java
deleted file mode 100644
index c8ff467..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTranslator.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.translation.runtime.GroupByWindowExecutor;
-import com.google.common.collect.Lists;
-import org.apache.beam.sdk.transforms.GroupByKey;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-
-import java.util.Collections;
-import java.util.List;
-
-public class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
-    // information of transform
-    protected PCollection<KV<K, V>> input;
-    protected PCollection<KV<K, Iterable<V>>> output;
-    protected List<TupleTag<?>> inputTags;
-    protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
-    protected List<TupleTag<?>> sideOutputTags;
-    protected List<PCollectionView<?>> sideInputs;
-    protected WindowingStrategy<?, ?> windowingStrategy;
-
-    @Override
-    public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-        input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-        output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
-
-        inputTags = userGraphContext.getInputTags();
-        mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
-        sideOutputTags = Lists.newArrayList();
-
-        sideInputs = Collections.<PCollectionView<?>>emptyList();
-        windowingStrategy = input.getWindowingStrategy();
-
-        GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
-                userGraphContext.getStepName(),
-                description,
-                context,
-                context.getUserGraphContext().getOptions(),
-                windowingStrategy,
-                mainOutputTag,
-                sideOutputTags);
-        context.addTransformExecutor(groupByWindowExecutor);
-    }
-}


[24/53] [abbrv] beam git commit: jstorm-runner: move most classes to translation package and reduece their visibility to package private.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
new file mode 100644
index 0000000..fce870f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import java.io.IOException;
+import javax.annotation.Nullable;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateTag;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.SetState;
+import org.apache.beam.sdk.state.State;
+import org.apache.beam.sdk.state.StateBinder;
+import org.apache.beam.sdk.state.StateContext;
+import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn;
+import org.apache.beam.sdk.transforms.CombineWithContext;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+
+/**
+ * JStorm implementation of {@link StateInternals}.
+ */
+class JStormStateInternals<K> implements StateInternals {
+
+  private static final String STATE_INFO = "state-info:";
+
+  @Nullable
+  private final K key;
+  private final IKvStoreManager kvStoreManager;
+  private final TimerService timerService;
+  private final int executorId;
+
+  public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
+                              TimerService timerService, int executorId) {
+    this.key = key;
+    this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
+    this.timerService = checkNotNull(timerService, "timerService");
+    this.executorId = executorId;
+  }
+
+  @Nullable
+  @Override
+  public K getKey() {
+    return key;
+  }
+
+  @Override
+  public <T extends State> T state(
+      StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
+    // throw new UnsupportedOperationException("StateContext is not supported.");
+    /**
+     * TODO:
+     * Same implementation as state() which is without StateContext. This might be updated after
+     * we figure out if we really need StateContext for JStorm state internals.
+     */
+    return state(namespace, address);
+  }
+
+  @Override
+  public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
+    return address.getSpec().bind(address.getId(), new StateBinder() {
+      @Override
+      public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
+        try {
+          return new JStormValueState<>(
+              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+      @Override
+      public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
+        try {
+          return new JStormBagState(
+              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+      @Override
+      public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
+          String id,
+          StateSpec<MapState<KeyT, ValueT>> spec,
+          Coder<KeyT> mapKeyCoder,
+          Coder<ValueT> mapValueCoder) {
+        try {
+          return new JStormMapState<>(
+              getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+
+      @Override
+      public <InputT, AccumT, OutputT> CombiningState bindCombining(
+          String id,
+          StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
+          Coder<AccumT> accumCoder,
+          Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+        try {
+          BagState<AccumT> accumBagState = new JStormBagState(
+              getKey(), namespace,
+              kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+          return new JStormCombiningState<>(accumBagState, combineFn);
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+
+      @Override
+      public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
+      bindCombiningWithContext(
+          String id,
+          StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
+          CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public WatermarkHoldState bindWatermark(
+          String id,
+          StateSpec<WatermarkHoldState> spec,
+          final TimestampCombiner timestampCombiner) {
+        try {
+          BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
+              getKey(), namespace,
+              kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+
+          Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
+              new BinaryCombineFn<Instant>() {
+                @Override
+                public Instant apply(Instant left, Instant right) {
+                  return timestampCombiner.combine(left, right);
+                }
+              };
+          return new JStormWatermarkHoldState(
+              namespace,
+              new JStormCombiningState<>(
+                  accumBagState,
+                  outputTimeCombineFn),
+              timestampCombiner,
+              timerService);
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+    });
+  }
+
+  private String getStoreId(String stateId) {
+    return String.format("%s-%s", stateId, executorId);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
new file mode 100644
index 0000000..4c96541
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import javax.annotation.Nullable;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.joda.time.Instant;
+
+/**
+ * JStorm implementation of {@link TimerInternals}.
+ */
+class JStormTimerInternals<K> implements TimerInternals {
+
+  // Key of the element this timer belongs to; null for unkeyed executors.
+  private final K key;
+  // Executor that will be fired back when the timer goes off.
+  private final DoFnExecutor<?, ?> doFnExecutor;
+  private final TimerService timerService;
+
+  public JStormTimerInternals(
+      @Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
+    this.key = key;
+    this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
+    this.timerService = checkNotNull(timerService, "timerService");
+  }
+
+  @Override
+  public void setTimer(
+      StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
+    // Delegate to the TimerData-based overload; both register with the timer service.
+    setTimer(TimerData.of(timerId, namespace, target, timeDomain));
+  }
+
+  @Override
+  @Deprecated
+  public void setTimer(TimerData timerData) {
+    timerService.setTimer(key, timerData, doFnExecutor);
+  }
+
+  @Override
+  public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
+    throw deletionNotSupported();
+  }
+
+  @Override
+  @Deprecated
+  public void deleteTimer(StateNamespace namespace, String timerId) {
+    throw deletionNotSupported();
+  }
+
+  @Override
+  @Deprecated
+  public void deleteTimer(TimerData timerData) {
+    throw deletionNotSupported();
+  }
+
+  // All three deleteTimer overloads share this failure; keep the message in one place.
+  private static UnsupportedOperationException deletionNotSupported() {
+    return new UnsupportedOperationException(
+        "Canceling of a timer is not yet supported.");
+  }
+
+  @Override
+  public Instant currentProcessingTime() {
+    return Instant.now();
+  }
+
+  @Override
+  @Nullable
+  public Instant currentSynchronizedProcessingTime() {
+    // Synchronized processing time is not tracked by this runner.
+    return null;
+  }
+
+  @Override
+  public Instant currentInputWatermarkTime() {
+    return new Instant(timerService.currentInputWatermark());
+  }
+
+  @Override
+  @Nullable
+  public Instant currentOutputWatermarkTime() {
+    return new Instant(timerService.currentOutputWatermark());
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
new file mode 100644
index 0000000..5d79d21
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStore;
+import java.io.IOException;
+import javax.annotation.Nullable;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.ValueState;
+
+/**
+ * JStorm implementation of {@link ValueState}.
+ */
+class JStormValueState<K, T> implements ValueState<T> {
+
+  // Key of the element this state belongs to; null for unkeyed executors.
+  @Nullable
+  private final K key;
+  private final StateNamespace namespace;
+  // Backing JStorm key-value store; entries are addressed by (key, namespace).
+  private final IKvStore<ComposedKey, T> kvState;
+
+  JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
+    this.key = key;
+    this.namespace = namespace;
+    this.kvState = kvState;
+  }
+
+  @Override
+  public void write(T t) {
+    try {
+      kvState.put(getComposedKey(), t);
+    } catch (IOException e) {
+      // Chain the cause so store failures remain diagnosable.
+      throw new RuntimeException(String.format(
+          "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t), e);
+    }
+  }
+
+  @Override
+  public T read() {
+    try {
+      return kvState.get(getComposedKey());
+    } catch (IOException e) {
+      throw new RuntimeException(String.format(
+          "Failed to read key: %s, namespace: %s.", key, namespace), e);
+    }
+  }
+
+  @Override
+  public ValueState<T> readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
+
+  @Override
+  public void clear() {
+    try {
+      kvState.remove(getComposedKey());
+    } catch (IOException e) {
+      throw new RuntimeException(String.format(
+          "Failed to clear key: %s, namespace: %s.", key, namespace), e);
+    }
+  }
+
+  private ComposedKey getComposedKey() {
+    return ComposedKey.of(key, namespace);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
new file mode 100644
index 0000000..7e1c28f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.GroupingState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+
+/**
+ * JStorm implementation of {@link WatermarkHoldState}.
+ */
+class JStormWatermarkHoldState implements WatermarkHoldState {
+
+  private final StateNamespace namespace;
+  // Persistent record of the holds; the TimerService keeps the live, in-memory view.
+  private final GroupingState<Instant, Instant> watermarkHoldsState;
+  private final TimestampCombiner timestampCombiner;
+  private final TimerService timerService;
+
+  JStormWatermarkHoldState(
+      StateNamespace namespace,
+      GroupingState<Instant, Instant> watermarkHoldsState,
+      TimestampCombiner timestampCombiner,
+      TimerService timerService) {
+    this.namespace = checkNotNull(namespace, "namespace");
+    this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
+    this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
+    this.timerService = checkNotNull(timerService, "timerService");
+  }
+
+  @Override
+  public TimestampCombiner getTimestampCombiner() {
+    return timestampCombiner;
+  }
+
+  @Override
+  public void add(Instant instant) {
+    // Register the hold with the timer service AND persist it in state; the two
+    // views must stay in sync so clear() can release exactly what was added.
+    timerService.addWatermarkHold(namespace.stringKey(), instant);
+    watermarkHoldsState.add(instant);
+  }
+
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return watermarkHoldsState.isEmpty();
+  }
+
+  @Override
+  public Instant read() {
+    // Returns the combined hold (per timestampCombiner) from the persisted state.
+    return watermarkHoldsState.read();
+  }
+
+  @Override
+  public WatermarkHoldState readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
+
+  @Override
+  public void clear() {
+    // Mirror add(): drop the live hold first, then the persisted record.
+    timerService.clearWatermarkHold(namespace.stringKey());
+    watermarkHoldsState.clear();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
new file mode 100644
index 0000000..82d8bdc
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.beam.runners.core.metrics.MetricsContainerStepMap.asAttemptedOnlyMetricResults;
+
+import com.alibaba.jstorm.common.metric.AsmCounter;
+import com.alibaba.jstorm.metric.MetricClient;
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
+import org.apache.beam.sdk.metrics.MetricQueryResults;
+import org.apache.beam.sdk.metrics.MetricResult;
+import org.apache.beam.sdk.metrics.MetricResults;
+import org.apache.beam.sdk.metrics.MetricsContainer;
+import org.apache.beam.sdk.metrics.MetricsFilter;
+
+/**
+ * Class that holds a {@link MetricsContainerStepMap}, and reports metrics to JStorm engine.
+ */
+class MetricsReporter {
+
+  private static final String METRIC_KEY_SEPARATOR = "__";
+  private static final String COUNTER_PREFIX = "__counter";
+
+  private final MetricsContainerStepMap metricsContainers = new MetricsContainerStepMap();
+  // Last value reported per metric name, so each update only pushes the delta.
+  private final Map<String, Long> reportedCounters = Maps.newHashMap();
+  private final MetricClient metricClient;
+
+  public static MetricsReporter create(MetricClient metricClient) {
+    return new MetricsReporter(metricClient);
+  }
+
+  private MetricsReporter(MetricClient metricClient) {
+    this.metricClient = checkNotNull(metricClient, "metricClient");
+  }
+
+  public MetricsContainer getMetricsContainer(String stepName) {
+    return metricsContainers.getContainer(stepName);
+  }
+
+  /** Queries all attempted metrics and pushes counter deltas to JStorm. */
+  public void updateMetrics() {
+    MetricResults metricResults = asAttemptedOnlyMetricResults(metricsContainers);
+    MetricQueryResults metricQueryResults =
+        metricResults.queryMetrics(MetricsFilter.builder().build());
+    updateCounters(metricQueryResults.counters());
+  }
+
+  private void updateCounters(Iterable<MetricResult<Long>> counters) {
+    for (MetricResult<Long> metricResult : counters) {
+      String metricName = getMetricNameString(COUNTER_PREFIX, metricResult);
+      Long updateValue = metricResult.attempted();
+      Long oldValue = reportedCounters.get(metricName);
+
+      if (oldValue == null || oldValue < updateValue) {
+        AsmCounter counter = metricClient.registerCounter(metricName);
+        Long incValue = (oldValue == null ? updateValue : updateValue - oldValue);
+        counter.update(incValue);
+        // Record what was reported; without this every update re-adds the full
+        // attempted value instead of the increment.
+        reportedCounters.put(metricName, updateValue);
+      }
+    }
+  }
+
+  // Builds "<prefix>__<step>__<namespace>__<name>".
+  private String getMetricNameString(String prefix, MetricResult<?> metricResult) {
+    return prefix
+        + METRIC_KEY_SEPARATOR + metricResult.step()
+        + METRIC_KEY_SEPARATOR + metricResult.name().namespace()
+        + METRIC_KEY_SEPARATOR + metricResult.name().name();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
new file mode 100644
index 0000000..49b0f85
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JStorm {@link Executor} for {@link DoFn} with multi-output.
+ * @param <InputT>
+ * @param <OutputT>
+ */
+class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
+  private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
+
+  /**
+   * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated
+   * tag is used in downstream consumer. So before output, we need to map this "local" tag to
+   * "external" tag. See PCollectionTuple for details.
+   */
+  public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
+    @Override
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      // Translate the producer-local tag to the externally visible tag when mapped.
+      TupleTag<T> externalTag = localTupleTagMap.containsKey(tag)
+          ? (TupleTag<T>) localTupleTagMap.get(tag)
+          : tag;
+      executorsBolt.processExecutorElem(externalTag, output);
+    }
+  }
+
+  // Mapping from producer-local tags to the tags consumed downstream.
+  protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
+
+  public MultiOutputDoFnExecutor(
+      String stepName,
+      String description,
+      JStormPipelineOptions pipelineOptions,
+      DoFn<InputT, OutputT> doFn,
+      Coder<WindowedValue<InputT>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<InputT> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+      TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags,
+      Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
+  ) {
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
+        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+    this.localTupleTagMap = localTupleTagMap;
+    // Install the tag-remapping output manager in place of the default one.
+    this.outputManager = new MultiOutputDoFnExecutorOutputManager();
+    LOG.info("localTupleTagMap: {}", localTupleTagMap);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
new file mode 100644
index 0000000..a3ffc30
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+
+/**
+ * JStorm {@link Executor} for stateful {@link DoFn} with multi-output.
+ * @param <OutputT>
+ */
+class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
+
+  public MultiStatefulDoFnExecutor(
+      String stepName, String description,
+      JStormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
+      Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
+        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    if (!mainInputTag.equals(tag)) {
+      processSideInput(tag, elem);
+      return;
+    }
+    // Main input: rebind timer/state internals to this element's key before processing.
+    WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+    Object key = kvElem.getValue().getKey();
+    stepContext.setTimerInternals(new JStormTimerInternals(key, this,
+        executorContext.getExecutorsBolt().timerService()));
+    stepContext.setStateInternals(new JStormStateInternals<>(key,
+        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    processMainInput(elem);
+  }
+
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    // Timers fire per key: rebind state internals to the firing key first.
+    stepContext.setStateInternals(new JStormStateInternals<>(key,
+        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    super.onTimer(key, timerData);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
new file mode 100644
index 0000000..7daa1cb
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.PValueBase;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Translates a ParDo.BoundMulti to a Storm {@link DoFnExecutor}.
+ */
+class ParDoBoundMultiTranslator<InputT, OutputT>
+    extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
+
+  @Override
+  public void translateNode(
+      ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
+    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
+    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+
+    // Map each producer-local output tag to the tag seen by downstream consumers
+    // (the first tag in each output PValue's expansion).
+    Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
+    Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
+    for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
+      Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
+      localToExternalTupleTagMap.put(entry.getKey(), itr.next());
+    }
+
+    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+    // Side outputs are all output tags except the main one.
+    List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
+    sideOutputTags.remove(mainOutputTag);
+
+    // Include side inputs alongside the main input for the human-readable description.
+    Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+    String description = describeTransform(
+        transform,
+        allInputs,
+        allOutputs);
+
+    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+
+    // Stateful/timer-using DoFns require keyed input and a state-aware executor.
+    DoFnExecutor executor;
+    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+    if (signature.stateDeclarations().size() > 0
+        || signature.timerDeclarations().size() > 0) {
+      executor = new MultiStatefulDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          (DoFn<KV, OutputT>) transform.getFn(),
+          (Coder) WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<KV>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags,
+          localToExternalTupleTagMap);
+    } else {
+      executor = new MultiOutputDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          transform.getFn(),
+          WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags,
+          localToExternalTupleTagMap);
+    }
+
+    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
new file mode 100644
index 0000000..6feb7f8
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Translates a ParDo.Bound to a JStorm {@link DoFnExecutor}.
+ */
+class ParDoBoundTranslator<InputT, OutputT>
+    extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
+
+  @Override
+  public void translateNode(
+      ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
+    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    final TupleTag<?> inputTag = userGraphContext.getInputTag();
+    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+
+    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+    List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
+
+    // Include side inputs alongside the main input for the human-readable description.
+    // Use the real Guava Maps, not the avro-shaded copy that was accidentally referenced.
+    Map<TupleTag<?>, PValue> allInputs =
+        com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+    String description = describeTransform(
+        transform,
+        allInputs,
+        userGraphContext.getOutputs());
+
+    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+
+    // Stateful/timer-using DoFns require keyed input and a state-aware executor.
+    DoFnExecutor executor;
+    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+    if (signature.stateDeclarations().size() > 0
+        || signature.timerDeclarations().size() > 0) {
+      executor = new StatefulDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          (DoFn<KV, OutputT>) transform.getFn(),
+          (Coder) WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<KV>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags);
+    } else {
+      executor = new DoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          transform.getFn(),
+          WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<InputT>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags);
+    }
+
+    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
new file mode 100644
index 0000000..4f469f3
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+
+/**
+ * Static helper methods shared by the JStorm runner translation layer.
+ */
+class RunnerUtils {
+
+  private RunnerUtils() {
+    // Utility class; no instances.
+  }
+
+  /**
+   * Converts a {@link WindowedValue} of a {@link KV} into a single-element
+   * {@link KeyedWorkItem} keyed by the KV's key.
+   *
+   * @param elem windowed key-value element to wrap
+   * @return a {@link KeyedWorkItem} carrying the key and the windowed value
+   */
+  public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
+    // Window/timestamp/pane metadata is preserved; only the payload is narrowed to the value.
+    // (The original self-cast of elem was redundant and has been removed.)
+    return SingletonKeyedWorkItem.of(
+        elem.getValue().getKey(),
+        elem.withValue(elem.getValue().getValue()));
+  }
+
+  /**
+   * Returns whether the given executor consumes {@link KeyedWorkItem}s, i.e. it is a
+   * group-by-window executor or a stateful DoFn executor.
+   */
+  public static boolean isGroupByKeyExecutor(Executor executor) {
+    return executor instanceof GroupByWindowExecutor
+        || executor instanceof StatefulDoFnExecutor
+        || executor instanceof MultiStatefulDoFnExecutor;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SerializedPipelineOptions.java
new file mode 100644
index 0000000..14d2972
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SerializedPipelineOptions.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import org.apache.beam.sdk.options.PipelineOptions;
+
+/**
+ * Encapsulates {@link PipelineOptions} in serialized (JSON) form so they can be shipped to the
+ * cluster, deserializing them lazily on first access.
+ */
+public class SerializedPipelineOptions implements Serializable {
+
+  /** Jackson-produced JSON bytes; this is the only state that travels over the wire. */
+  private final byte[] serializedOptions;
+
+  /**
+   * Lazily initialized copy of deserialized options.
+   */
+  private transient PipelineOptions pipelineOptions;
+
+  /**
+   * Serializes the given options eagerly.
+   *
+   * @param options options to ship; must not be null
+   * @throws RuntimeException if Jackson fails to serialize the options
+   */
+  public SerializedPipelineOptions(PipelineOptions options) {
+    checkNotNull(options, "PipelineOptions must not be null.");
+
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      new ObjectMapper().writeValue(baos, options);
+      this.serializedOptions = baos.toByteArray();
+    } catch (IOException e) {
+      // Narrowed from catch (Exception): ObjectMapper.writeValue declares IOException.
+      throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
+    }
+  }
+
+  /**
+   * Returns the deserialized options, deserializing on first call. Not synchronized: concurrent
+   * first calls may each deserialize, which only duplicates work.
+   */
+  public PipelineOptions getPipelineOptions() {
+    if (pipelineOptions == null) {
+      try {
+        pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
+      } catch (IOException e) {
+        throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
+      }
+    }
+    return pipelineOptions;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
new file mode 100644
index 0000000..b321c76
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.Collections;
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.util.WindowedValue;
+
+/**
+ * A {@link KeyedWorkItem} holding exactly one windowed element and no timers.
+ *
+ * @param <K> type of the key
+ * @param <ElemT> type of the single element
+ */
+class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
+
+  final K key;
+  final WindowedValue<ElemT> value;
+
+  private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
+    this.key = key;
+    this.value = value;
+  }
+
+  /** Creates a work item for {@code key} wrapping the single windowed {@code value}. */
+  public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
+    return new SingletonKeyedWorkItem<>(key, value);
+  }
+
+  @Override
+  public K key() {
+    return key;
+  }
+
+  /** Returns the single windowed element carried by this work item. */
+  public WindowedValue<ElemT> value() {
+    return value;
+  }
+
+  @Override
+  public Iterable<TimerInternals.TimerData> timersIterable() {
+    // A singleton work item never carries timers. Use the typed emptyList() rather than
+    // the raw Collections.EMPTY_LIST constant to avoid an unchecked conversion.
+    return Collections.emptyList();
+  }
+
+  @Override
+  public Iterable<WindowedValue<ElemT>> elementsIterable() {
+    return Collections.singletonList(value);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
new file mode 100644
index 0000000..911f259
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+
+/**
+ * JStorm {@link Executor} for a stateful {@link DoFn} (one declaring state or timers).
+ * The main input is keyed ({@code KV}); per-key state and timer internals are installed on
+ * the step context before each main-input element or fired timer is processed.
+ *
+ * @param <OutputT> type of the main output elements
+ */
+class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
+  public StatefulDoFnExecutor(
+      String stepName, String description, JStormPipelineOptions pipelineOptions,
+      DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
+          sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+    // All wiring is delegated to DoFnExecutor; this subclass only adds per-key scoping.
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
+        mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    if (mainInputTag.equals(tag)) {
+      // Main input: scope timer and state internals to this element's key before processing.
+      WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+      stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
+          executorContext.getExecutorsBolt().timerService()));
+      stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
+          kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+      processMainInput(elem);
+    } else {
+      // Side inputs are not keyed; no per-key internals are needed.
+      processSideInput(tag, elem);
+    }
+  }
+
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    // Re-scope state internals to the timer's key before delegating to the base handler.
+    stepContext.setStateInternals(new JStormStateInternals<>(key,
+        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    super.onTimer(key, timerData);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Stream.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Stream.java
new file mode 100644
index 0000000..30ff18c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Stream.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.auto.value.AutoValue;
+import java.util.List;
+import javax.annotation.Nullable;
+
+/**
+ * Class that defines the stream connection between upstream and downstream components.
+ * A {@link Stream} pairs a {@link Producer} endpoint with a {@link Consumer} endpoint.
+ * All nested types are immutable {@code @AutoValue} classes.
+ */
+@AutoValue
+public abstract class Stream {
+
+  /** Returns the upstream endpoint emitting tuples on this stream. */
+  public abstract Producer getProducer();
+
+  /** Returns the downstream endpoint receiving tuples from this stream. */
+  public abstract Consumer getConsumer();
+
+  public static Stream of(Producer producer, Consumer consumer) {
+    return new AutoValue_Stream(
+        producer, consumer);
+  }
+
+  /**
+   * JStorm producer: identifies the emitting component and the stream it emits on.
+   */
+  @AutoValue
+  public abstract static class Producer {
+    public abstract String getComponentId();
+
+    public abstract String getStreamId();
+
+    public abstract String getStreamName();
+
+    public static Producer of(String componentId, String streamId, String streamName) {
+      return new AutoValue_Stream_Producer(
+          componentId, streamId, streamName);
+    }
+  }
+
+  /**
+   * JStorm consumer: identifies the receiving component and how tuples are routed to it.
+   */
+  @AutoValue
+  public abstract static class Consumer {
+    public abstract String getComponentId();
+
+    public abstract Grouping getGrouping();
+
+    public static Consumer of(String componentId, Grouping grouping) {
+      return new AutoValue_Stream_Consumer(
+          componentId, grouping);
+    }
+  }
+
+  /**
+   * JStorm grouping, which defines how messages are transferred between two nodes.
+   * Only {@link Type#FIELDS} groupings carry a (non-null) list of key fields.
+   */
+  @AutoValue
+  public abstract static class Grouping {
+    public abstract Type getType();
+
+    /** Key fields for a FIELDS grouping; null for every other grouping type. */
+    @Nullable
+    public abstract List<String> getFields();
+
+    /** Creates a grouping of any type except FIELDS (which requires key fields). */
+    public static Grouping of(Type type) {
+      checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
+      return new AutoValue_Stream_Grouping(
+          type, null /* fields */);
+    }
+
+    /** Creates a FIELDS grouping routed by the given non-empty list of key fields. */
+    public static Grouping byFields(List<String> fields) {
+      checkNotNull(fields, "fields");
+      checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
+      return new AutoValue_Stream_Grouping(
+          Type.FIELDS, fields);
+    }
+
+    /**
+     * Types of stream groupings Storm allows.
+     */
+    public enum Type {
+      ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
new file mode 100644
index 0000000..29345aa
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.io.Serializable;
+import java.util.List;
+import org.apache.beam.runners.core.TimerInternals;
+import org.joda.time.Instant;
+
+/**
+ * Interface that tracks input watermarks and manages event-time timers in each bolt.
+ */
+interface TimerService extends Serializable {
+
+  /** Registers the upstream task ids whose watermarks this service will track. */
+  void init(List<Integer> upStreamTasks);
+
+  /**
+   * Updates the watermark reported by one upstream task.
+   *
+   * @param task id of the upstream task reporting the watermark
+   * @param inputWatermark the task's new input watermark, in millis
+   * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
+   */
+  long updateInputWatermark(Integer task, long inputWatermark);
+
+  /** Returns the current combined input watermark (millis) across all upstream tasks. */
+  long currentInputWatermark();
+
+  /** Returns the current output watermark (millis), i.e. input watermark capped by holds. */
+  long currentOutputWatermark();
+
+  /** Removes the watermark hold previously set for the given namespace, if any. */
+  void clearWatermarkHold(String namespace);
+
+  /** Holds the output watermark at {@code watermarkHold} on behalf of {@code namespace}. */
+  void addWatermarkHold(String namespace, Instant watermarkHold);
+
+  /** Registers an event-time timer for {@code key}, to be delivered to {@code doFnExecutor}. */
+  void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
+
+  /** Fires every registered timer whose timestamp is at or before {@code newWatermark}. */
+  void fireTimers(long newWatermark);
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
new file mode 100644
index 0000000..c2600e5
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.alibaba.jstorm.utils.Pair;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.joda.time.Instant;
+
+/**
+ * Default implementation of {@link TimerService}.
+ *
+ * <p>Tracks one watermark per upstream task; the combined input watermark is the minimum over
+ * tasks (head of a priority queue). Event-time timers are kept in a queue ordered by timestamp
+ * and fired when the watermark passes them. Only {@code updateInputWatermark} is synchronized;
+ * other methods are expected to be called from the owning bolt's thread.
+ */
+class TimerServiceImpl implements TimerService {
+  private transient ExecutorContext executorContext;
+  private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
+
+  // Last watermark reported by each upstream task.
+  private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark =
+      new ConcurrentHashMap<>();
+  // Multiset-like view of the per-task watermarks; peek() yields the minimum (combined) watermark.
+  private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
+  // Active holds; peek() yields the earliest hold, which caps the output watermark.
+  private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
+  private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
+  // NOTE(review): declared both final and transient — after Java deserialization this field
+  // would be null; presumably instances are never serialized with pending timers. Confirm.
+  private final transient PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
+      new PriorityQueue<>();
+  // For each timer, the set of (executor id, key) pairs it must be delivered to.
+  private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
+      timerDataToKeyedExecutors = Maps.newHashMap();
+
+  private boolean initialized = false;
+
+  public TimerServiceImpl() {
+  }
+
+  public TimerServiceImpl(ExecutorContext executorContext) {
+    this.executorContext = executorContext;
+    this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
+  }
+
+  @Override
+  public void init(List<Integer> upStreamTasks) {
+    // Seed every upstream task at the minimum watermark so the combined minimum is well-defined.
+    for (Integer task : upStreamTasks) {
+      upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
+      inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
+    }
+    initialized = true;
+  }
+
+  @Override
+  public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
+    checkState(initialized, "TimerService has not been initialized.");
+    Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
+    // Make sure the input watermark don't go backward.
+    if (taskInputWatermark > oldTaskInputWatermark) {
+      upStreamTaskToInputWatermark.put(task, taskInputWatermark);
+      inputWatermarks.add(taskInputWatermark);
+      inputWatermarks.remove(oldTaskInputWatermark);
+
+      long newLocalInputWatermark = currentInputWatermark();
+      // NOTE(review): this compares the new combined watermark against the updated task's OLD
+      // watermark, not against the previous combined watermark — it looks like it could report
+      // an advance that did not change the combined minimum. Confirm intended semantics.
+      if (newLocalInputWatermark > oldTaskInputWatermark) {
+        return newLocalInputWatermark;
+      }
+    }
+    return 0;
+  }
+
+  @Override
+  public void fireTimers(long newWatermark) {
+    // Drain all timers whose timestamp is at or before the new watermark, in timestamp order,
+    // delivering each to every (executor, key) pair registered for it.
+    TimerInternals.TimerData timerData;
+    while ((timerData = eventTimeTimersQueue.peek()) != null
+        && timerData.getTimestamp().getMillis() <= newWatermark) {
+      for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
+        DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
+        executor.onTimer(keyedExecutor.getSecond(), timerData);
+      }
+      eventTimeTimersQueue.remove();
+      timerDataToKeyedExecutors.remove(timerData);
+    }
+  }
+
+  @Override
+  public long currentInputWatermark() {
+    return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
+  }
+
+  @Override
+  public long currentOutputWatermark() {
+    // The output watermark is the input watermark, capped by the earliest active hold.
+    if (watermarkHolds.isEmpty()) {
+      return currentInputWatermark();
+    } else {
+      return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
+    }
+  }
+
+  @Override
+  public void clearWatermarkHold(String namespace) {
+    Instant currentHold = namespaceToWatermarkHold.get(namespace);
+    if (currentHold != null) {
+      watermarkHolds.remove(currentHold);
+      namespaceToWatermarkHold.remove(namespace);
+    }
+  }
+
+  @Override
+  public void addWatermarkHold(String namespace, Instant watermarkHold) {
+    // A namespace keeps only its earliest hold: a later (larger) hold request is ignored.
+    Instant currentHold = namespaceToWatermarkHold.get(namespace);
+    if (currentHold == null) {
+      namespaceToWatermarkHold.put(namespace, watermarkHold);
+      watermarkHolds.add(watermarkHold);
+    // NOTE(review): the "currentHold != null" in this else-if is redundant (already implied).
+    } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
+      namespaceToWatermarkHold.put(namespace, watermarkHold);
+      watermarkHolds.add(watermarkHold);
+      watermarkHolds.remove(currentHold);
+    }
+  }
+
+  @Override
+  public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
+    // Only event-time timers are supported by this service.
+    checkArgument(
+        TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
+        String.format("Does not support domain: %s.", timerData.getDomain()));
+    Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
+    if (keyedExecutors == null) {
+      // First registration for this timer: also enqueue it for firing.
+      keyedExecutors = Sets.newHashSet();
+      eventTimeTimersQueue.add(timerData);
+    }
+    keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
+    timerDataToKeyedExecutors.put(timerData, keyedExecutors);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
new file mode 100644
index 0000000..edd3d8a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.FluentIterable;
+import java.util.Map;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Interface for classes capable of translating Beam PTransforms into Storm primitives.
+ */
+interface TransformTranslator<T extends PTransform<?, ?>> {
+
+  void translateNode(T transform, TranslationContext context);
+
+  /**
+   * Returns true if this translator can translate the given transform.
+   */
+  boolean canTranslate(T transform, TranslationContext context);
+
+  /**
+   * Default translator: accepts every transform and translates it to nothing.
+   * @param <T1> type of the handled transform
+   */
+  class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
+    @Override
+    public void translateNode(T1 transform, TranslationContext context) {
+      // Intentionally a no-op: transforms without a dedicated translator contribute
+      // no topology components.
+    }
+
+    @Override
+    public boolean canTranslate(T1 transform, TranslationContext context) {
+      return true;
+    }
+
+    /**
+     * Renders a transform as {@code "inputs --> name --> outputs"} for logging/debugging,
+     * where inputs and outputs are '+'-joined tuple-tag ids.
+     */
+    static String describeTransform(
+        PTransform<?, ?> transform,
+        Map<TupleTag<?>, PValue> inputs,
+        Map<TupleTag<?>, PValue> outputs) {
+      return String.format("%s --> %s --> %s",
+          joinTagIds(inputs), transform.getName(), joinTagIds(outputs));
+    }
+
+    /** Joins the ids of all tuple tags in the map with '+'. */
+    private static String joinTagIds(Map<TupleTag<?>, PValue> taggedValues) {
+      return Joiner.on('+').join(FluentIterable.from(taggedValues.entrySet())
+          .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
+            @Override
+            public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
+              return taggedPValue.getKey().getId();
+            }
+          }));
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 28d102d..b84fd4a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -34,12 +34,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.Executor;
-import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
-import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-import org.apache.beam.runners.jstorm.translation.translator.Stream;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
 import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.PValueBase;

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
index 316186e..9eaa13a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
@@ -19,15 +19,6 @@ package org.apache.beam.runners.jstorm.translation;
 
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.translator.BoundedSourceTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.FlattenTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.GroupByKeyTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.ParDoBoundMultiTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.ParDoBoundTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.UnboundedSourceTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.WindowAssignTranslator;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
@@ -40,7 +31,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Lookup table mapping PTransform types to associated TransformTranslator implementations.
  */
-public class TranslatorRegistry {
+class TranslatorRegistry {
   private static final Logger LOG = LoggerFactory.getLogger(TranslatorRegistry.class);
 
   private static final Map<Class<? extends PTransform>, TransformTranslator> TRANSLATORS =

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxExecutorsBolt.java
new file mode 100644
index 0000000..2159cfa
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxExecutorsBolt.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.transactional.bolt.ITransactionStatefulBoltExecutor;
+import java.io.IOException;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Transactional executors bolt handles the checkpoint and restore of state and timer.
+ *
+ * <p>Wraps a delegate {@link ExecutorsBolt}: all regular bolt lifecycle calls
+ * (execute/cleanup/declareOutputFields/getComponentConfiguration) are forwarded to it,
+ * while the transactional callbacks (initState/finishBatch/commit/rollBack/ackCommit)
+ * persist and restore the bolt's state and its {@link TimerService} via a
+ * {@link IKvStoreManager}.
+ */
+public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
+  private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
+
+  // Id of the kv-store that holds the serialized TimerService between checkpoints.
+  private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
+  // Key under which the TimerService is stored in that kv-store.
+  // NOTE(review): "KET" looks like a typo for "KEY"; the constant is private and the
+  // literal value is independent, so renaming would be safe but is left unchanged here.
+  private static final String TIMER_SERVICE_KET = "timer_service_key";
+
+  private ExecutorsBolt executorsBolt;
+  private IKvStoreManager kvStoreManager;
+  private IKvStore<String, TimerService> timerServiceStore;
+
+  public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
+    this.executorsBolt = executorsBolt;
+    // Mark the delegate as stateful so it participates in checkpointing.
+    this.executorsBolt.setStatefulBolt(true);
+  }
+
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    try {
+      // Delegate standard preparation, then obtain the kv-store manager created by the
+      // delegate's executor context and the store used for the TimerService.
+      executorsBolt.prepare(stormConf, context, collector);
+      kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
+      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+    } catch (IOException e) {
+      LOG.error("Failed to prepare stateful bolt", e);
+      // NOTE(review): wrapping only e.getMessage() drops the cause chain;
+      // `new RuntimeException(e)` would preserve the stack trace for debugging.
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void execute(Tuple input) {
+    executorsBolt.execute(input);
+  }
+
+  @Override
+  public void cleanup() {
+    executorsBolt.cleanup();
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    executorsBolt.declareOutputFields(declarer);
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return executorsBolt.getComponentConfiguration();
+  }
+
+  @Override
+  public void initState(Object userState) {
+    LOG.info("Begin to init from state: {}", userState);
+    restore(userState);
+  }
+
+  @Override
+  public Object finishBatch(long batchId) {
+    try {
+      // Snapshot the current TimerService before checkpointing all kv-stores for this batch.
+      timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
+    } catch (IOException e) {
+      LOG.error("Failed to store current timer service status", e);
+      // NOTE(review): same cause-dropping pattern as in prepare(); see comment there.
+      throw new RuntimeException(e.getMessage());
+    }
+    kvStoreManager.checkpoint(batchId);
+    // No per-batch user state is carried to commit(); the kv-store manager holds it all.
+    return null;
+  }
+
+  @Override
+  public Object commit(long batchId, Object state) {
+    // Back up the checkpointed batch to the remote state backend; the returned handle is
+    // what the framework later passes back for restore.
+    return kvStoreManager.backup(batchId);
+  }
+
+  @Override
+  public void rollBack(Object userState) {
+    LOG.info("Begin to rollback from state: {}", userState);
+    restore(userState);
+  }
+
+  @Override
+  public void ackCommit(long batchId, long timeStamp) {
+    // Batch is fully committed; drop its obsolete checkpoint data.
+    kvStoreManager.remove(batchId);
+  }
+
+  // Shared restore path for initState() and rollBack(): reloads all kv-stores from the
+  // given state handle and reinstalls (or lazily initializes) the TimerService.
+  private void restore(Object userState) {
+    try {
+      // restore all states
+      kvStoreManager.restore(userState);
+
+      // init timer service
+      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+      TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
+      if (timerService == null) {
+        // First run (or no timer state was checkpointed): build a fresh TimerService.
+        timerService = executorsBolt.initTimerService();
+      }
+      executorsBolt.setTimerService(timerService);
+    } catch (IOException e) {
+      LOG.error("Failed to restore state", e);
+      // NOTE(review): same cause-dropping pattern as in prepare(); see comment there.
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxUnboundedSourceSpout.java
new file mode 100644
index 0000000..382cb50
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TxUnboundedSourceSpout.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.KvStoreManagerFactory;
+import com.alibaba.jstorm.transactional.spout.ITransactionSpoutExecutor;
+import java.io.IOException;
+import java.util.Map;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Transactional unbounded source spout handles the checkpoint and restore of state and timer.
+ *
+ * <p>Wraps a delegate {@link UnboundedSourceSpout} and adds transactional semantics:
+ * at each batch boundary the Beam {@link UnboundedSource.CheckpointMark} of the reader is
+ * persisted into a kv-store, and on init/rollback the reader is recreated from the last
+ * persisted mark.
+ */
+public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
+  private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
+
+  // Id of the kv-store holding the source checkpoint, and the key of the mark within it.
+  private static final String SOURCE_STORE_ID = "SourceCheckpoint";
+  private static final String CHECKPOINT_MARK = "CheckpointMark";
+
+  private UnboundedSourceSpout sourceSpout;
+  // Reader of the wrapped spout; refreshed whenever the spout recreates its reader.
+  private UnboundedSource.UnboundedReader reader;
+  private IKvStoreManager kvStoreManager;
+  private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
+
+  public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
+    this.sourceSpout = sourceSpout;
+  }
+
+  // Shared restore path for initState() and rollBack(): reload the kv-stores from the
+  // given state handle and rebuild the source reader from the persisted checkpoint mark
+  // (null mark means start from scratch).
+  private void restore(Object userState) {
+    try {
+      kvStoreManager.restore(userState);
+      sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
+      UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
+      sourceSpout.createSourceReader(checkpointMark);
+      reader = sourceSpout.getUnboundedSourceReader();
+    } catch (IOException e) {
+      LOG.error("Failed to init state", e);
+      // NOTE(review): wrapping only e.getMessage() drops the cause chain;
+      // `new RuntimeException(e)` would preserve the stack trace for debugging.
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void initState(Object userState) {
+    restore(userState);
+  }
+
+  @Override
+  public Object finishBatch(long checkpointId) {
+    try {
+      // Store check point mark from unbounded source reader
+      UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
+      sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
+
+      // checkpoint all kv stores in current manager
+      kvStoreManager.checkpoint(checkpointId);
+    } catch (IOException e) {
+      LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
+      // NOTE(review): same cause-dropping pattern as in restore(); see comment there.
+      throw new RuntimeException(e.getMessage());
+    }
+    // No per-batch user state is carried to commit(); the kv-store manager holds it all.
+    return null;
+  }
+
+  @Override
+  public Object commit(long batchId, Object state) {
+    // backup kv stores to remote state backend
+    return kvStoreManager.backup(batchId);
+  }
+
+  @Override
+  public void rollBack(Object userState) {
+    restore(userState);
+  }
+
+  @Override
+  public void ackCommit(long batchId, long timeStamp) {
+    // remove obsolete state in bolt local and remote state backend
+    kvStoreManager.remove(batchId);
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    sourceSpout.declareOutputFields(declarer);
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return sourceSpout.getComponentConfiguration();
+  }
+
+  @Override
+  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+    try {
+      sourceSpout.open(conf, context, collector);
+      // One kv-store manager per task, rooted under the worker's id directory.
+      String storeName = String.format("task-%s", context.getThisTaskId());
+      String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+      kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(
+          context, storeName, storePath, true);
+
+      reader = sourceSpout.getUnboundedSourceReader();
+    } catch (IOException e) {
+      LOG.error("Failed to open transactional unbounded source spout", e);
+      // NOTE(review): same cause-dropping pattern as in restore(); see comment there.
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void close() {
+    sourceSpout.close();
+  }
+
+  @Override
+  public void activate() {
+    sourceSpout.activate();
+  }
+
+  @Override
+  public void deactivate() {
+    sourceSpout.deactivate();
+  }
+
+  @Override
+  public void nextTuple() {
+    sourceSpout.nextTuple();
+  }
+
+  // Per-message ack/fail are not supported by this spout; delivery is managed at batch
+  // granularity via finishBatch/commit/rollBack — presumably the framework never calls
+  // these in transactional mode (TODO confirm).
+  @Override
+  public void ack(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void fail(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+}


[39/53] [abbrv] beam git commit: jstorm-runner: add SdkRepackImmutableMapSerializer.

Posted by pe...@apache.org.
jstorm-runner: add SdkRepackImmutableMapSerializer.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/588a6981
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/588a6981
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/588a6981

Branch: refs/heads/jstorm-runner
Commit: 588a6981855b68b9733a1b0f368dce0ad5cfe837
Parents: ad04648
Author: Pei He <pe...@apache.org>
Authored: Wed Jul 19 11:13:04 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       |  2 +
 .../SdkRepackImmutableMapSerializer.java        | 73 ++++++++++++++++++++
 2 files changed, 75 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/588a6981/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index baf4e5a..286a975 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -37,6 +37,7 @@ import org.apache.beam.runners.jstorm.serialization.ImmutableSetSerializer;
 import org.apache.beam.runners.jstorm.serialization.KvStoreIterableSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuListSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuSetSerializer;
+import org.apache.beam.runners.jstorm.serialization.SdkRepackImmutableMapSerializer;
 import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSerializer;
 import org.apache.beam.runners.jstorm.translation.AbstractComponent;
 import org.apache.beam.runners.jstorm.translation.CommonInstance;
@@ -103,6 +104,7 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
     ImmutableSetSerializer.registerSerializers(config);
     SdkRepackImmuSetSerializer.registerSerializers(config);
     ImmutableMapSerializer.registerSerializers(config);
+    SdkRepackImmutableMapSerializer.registerSerializers(config);
 
     config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
     return config;

http://git-wip-us.apache.org/repos/asf/beam/blob/588a6981/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
new file mode 100644
index 0000000..546538a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
@@ -0,0 +1,73 @@
+// NOTE(review): this new file is missing the standard ASF license header that the
+// sibling files in this commit series carry; checkstyle/RAT will likely flag it.
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableMap;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Maps;
+
+/**
+ * Specific serializer of {@link Kryo} for ImmutableMap.
+ *
+ * <p>Targets the Beam-SDK-repackaged Guava {@code ImmutableMap}: values are round-tripped
+ * through a plain {@link HashMap}, since Kryo cannot construct immutable collections
+ * directly.
+ */
+public class SdkRepackImmutableMapSerializer
+    extends Serializer<ImmutableMap<Object, ? extends Object>> {
+
+  // ImmutableMap rejects nulls and never changes after construction, so tell Kryo both.
+  private static final boolean DOES_NOT_ACCEPT_NULL = true;
+  private static final boolean IMMUTABLE = true;
+
+  public SdkRepackImmutableMapSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
+
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
+    // Serialize as a mutable HashMap copy; read() mirrors this.
+    kryo.writeObject(output, Maps.newHashMap(immutableMap));
+  }
+
+  @Override
+  public ImmutableMap<Object, Object> read(
+      Kryo kryo,
+      Input input,
+      Class<ImmutableMap<Object, ? extends Object>> type) {
+    Map map = kryo.readObject(input, HashMap.class);
+    return ImmutableMap.copyOf(map);
+  }
+
+  /**
+   * Creates a new {@link SdkRepackImmutableMapSerializer} and registers its serializer
+   * for the several ImmutableMap related classes.
+   */
+  public static void registerSerializers(Config config) {
+
+    // Kryo registration matches concrete classes, so each hidden ImmutableMap
+    // implementation class (empty, singleton, regular, enum-backed) is registered
+    // by instantiating a representative and taking its runtime class.
+    config.registerSerialization(ImmutableMap.class, SdkRepackImmutableMapSerializer.class);
+    config.registerSerialization(
+        ImmutableMap.of().getClass(), SdkRepackImmutableMapSerializer.class);
+
+    Object o1 = new Object();
+    Object o2 = new Object();
+
+    config.registerSerialization(
+        ImmutableMap.of(o1, o1).getClass(), SdkRepackImmutableMapSerializer.class);
+    config.registerSerialization(
+        ImmutableMap.of(o1, o1, o2, o2).getClass(),
+        SdkRepackImmutableMapSerializer.class);
+    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
+    for (DummyEnum e : DummyEnum.values()) {
+      enumMap.put(e, o1);
+    }
+
+    config.registerSerialization(
+        ImmutableMap.copyOf(enumMap).getClass(),
+        SdkRepackImmutableMapSerializer.class);
+  }
+
+  // Throwaway enum used only to obtain the enum-backed ImmutableMap implementation class.
+  private enum DummyEnum {
+    VALUE1,
+    VALUE2
+  }
+}


[50/53] [abbrv] beam git commit: jstorm-runner: Add Kryo serializer for UnmodifiableIterable

Posted by pe...@apache.org.
jstorm-runner: Add Kryo serializer for UnmodifiableIterable


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/9e808730
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/9e808730
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/9e808730

Branch: refs/heads/jstorm-runner
Commit: 9e8087306b5562fdecf678979b9f2d49dfaf368f
Parents: 90ed2ef
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Aug 16 19:01:48 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:01 2017 +0800

----------------------------------------------------------------------
 .../BeamSdkRepackUtilsSerializer.java           | 34 ++++++++++++++++++++
 .../serialization/GuavaUtilsSerializer.java     | 34 ++++++++++++++++++++
 2 files changed, 68 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/9e808730/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
index 4ae47eb..2912194 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
@@ -24,12 +24,14 @@ import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
 import java.util.EnumMap;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.HashBasedTable;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableList;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableMap;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableSet;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableTable;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Iterables;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Lists;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Maps;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Sets;
@@ -244,10 +246,42 @@ public class BeamSdkRepackUtilsSerializer {
         ImmutableSetSerializer.class);
   }
 
+  /**
+   * Specific serializer of {@link Kryo} for UnmodifiableIterable.
+   */
+  public static class UnmodifiableIterableSerializer extends Serializer<Iterable<Object>> {
+
+    @Override
+    public void write(Kryo kryo, Output output, Iterable<Object> object) {
+      int size = Iterables.size(object);
+      output.writeInt(size, true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public Iterable<Object> read(Kryo kryo, Input input, Class<Iterable<Object>> type) {
+      final int size = input.readInt(true);
+      List<Object> iterable = Lists.newArrayList();
+      for (int i = 0; i < size; ++i) {
+        iterable.add(kryo.readClassAndObject(input));
+      }
+      return Iterables.unmodifiableIterable(iterable);
+    }
+  }
+
+  private static void registerUnmodifiableIterablesSerializers(Config config) {
+    config.registerSerialization(
+        Iterables.unmodifiableIterable(Lists.newArrayList()).getClass(),
+        UnmodifiableIterableSerializer.class);
+  }
+
   public static void registerSerializers(Config config) {
     registerImmutableListSerializers(config);
     registerImmutableMapSerializers(config);
     registerImmutableSetSerializers(config);
+    registerUnmodifiableIterablesSerializers(config);
   }
 }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/9e808730/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
index e6f750c..ee83aa6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
@@ -27,6 +27,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -34,6 +35,7 @@ import com.google.common.collect.Table;
 
 import java.util.EnumMap;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -244,9 +246,41 @@ public class GuavaUtilsSerializer {
         ImmutableSetSerializer.class);
   }
 
+  /**
+   * Specific serializer of {@link Kryo} for UnmodifiableIterable.
+   */
+  public static class UnmodifiableIterableSerializer extends Serializer<Iterable<Object>> {
+
+    @Override
+    public void write(Kryo kryo, Output output, Iterable<Object> object) {
+      int size = Iterables.size(object);
+      output.writeInt(size, true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public Iterable<Object> read(Kryo kryo, Input input, Class<Iterable<Object>> type) {
+      final int size = input.readInt(true);
+      List<Object> iterable = Lists.newArrayList();
+      for (int i = 0; i < size; ++i) {
+        iterable.add(kryo.readClassAndObject(input));
+      }
+      return Iterables.unmodifiableIterable(iterable);
+    }
+  }
+
+  private static void registerUnmodifiableIterablesSerializers(Config config) {
+    config.registerSerialization(
+        Iterables.unmodifiableIterable(Lists.newArrayList()).getClass(),
+        UnmodifiableIterableSerializer.class);
+  }
+
   public static void registerSerializers(Config config) {
     registerImmutableListSerializers(config);
     registerImmutableMapSerializers(config);
     registerImmutableSetSerializers(config);
+    registerUnmodifiableIterablesSerializers(config);
   }
 }


[11/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
index c487578..77e4381 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
@@ -18,94 +18,101 @@
 package org.apache.beam.runners.jstorm.translation.translator;
 
 import avro.shaded.com.google.common.collect.Maps;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
 import org.apache.beam.runners.jstorm.translation.runtime.MultiOutputDoFnExecutor;
 import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.*;
-
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.PValueBase;
+import org.apache.beam.sdk.values.TupleTag;
 
 /**
  * Translates a ParDo.BoundMulti to a Storm {@link DoFnExecutor}.
  */
 public class ParDoBoundMultiTranslator<InputT, OutputT>
-        extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
-
-    @Override
-    public void translateNode(ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
-        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
-        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+    extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
 
-        Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
-        Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
-        for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
-            Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
-            localToExternalTupleTagMap.put(entry.getKey(), itr.next());
-        }
+  @Override
+  public void translateNode(
+      ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
+    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
+    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
 
-        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-        List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
-        sideOutputTags.remove(mainOutputTag);
+    Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
+    Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
+    for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
+      Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
+      localToExternalTupleTagMap.put(entry.getKey(), itr.next());
+    }
 
-        Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-        String description = describeTransform(
-                transform,
-                allInputs,
-                allOutputs);
+    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+    List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
+    sideOutputTags.remove(mainOutputTag);
 
-        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
+    Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+    String description = describeTransform(
+        transform,
+        allInputs,
+        allOutputs);
 
-        DoFnExecutor executor;
-        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-        if (signature.stateDeclarations().size() > 0
-                || signature.timerDeclarations().size() > 0) {
-            executor = new MultiStatefulDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    (DoFn<KV, OutputT>) transform.getFn(),
-                    (Coder) WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<KV>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags,
-                    localToExternalTupleTagMap);
-        } else {
-            executor = new MultiOutputDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    transform.getFn(),
-                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags,
-                    localToExternalTupleTagMap);
-        }
+    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
 
-        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+    DoFnExecutor executor;
+    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+    if (signature.stateDeclarations().size() > 0
+        || signature.timerDeclarations().size() > 0) {
+      executor = new MultiStatefulDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          (DoFn<KV, OutputT>) transform.getFn(),
+          (Coder) WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<KV>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags,
+          localToExternalTupleTagMap);
+    } else {
+      executor = new MultiOutputDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          transform.getFn(),
+          WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags,
+          localToExternalTupleTagMap);
     }
+
+    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
index 3a952a9..7b998d9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
@@ -17,24 +17,25 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
-import java.util.List;
-import java.util.Map;
-
 import avro.shaded.com.google.common.collect.Lists;
-import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.*;
-
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,65 +43,68 @@ import org.slf4j.LoggerFactory;
  * Translates a ParDo.Bound to a Storm {@link DoFnExecutor}.
  */
 public class ParDoBoundTranslator<InputT, OutputT>
-        extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
-
-    private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
+    extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
 
-    @Override
-    public void translateNode(ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
-        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        final TupleTag<?> inputTag = userGraphContext.getInputTag();
-        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+  private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
 
-        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-        List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
+  @Override
+  public void translateNode(
+      ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
+    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    final TupleTag<?> inputTag = userGraphContext.getInputTag();
+    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
 
-        Map<TupleTag<?>, PValue> allInputs = avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-        String description = describeTransform(
-                transform,
-                allInputs,
-                userGraphContext.getOutputs());
+    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+    List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
 
-        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
+    Map<TupleTag<?>, PValue> allInputs =
+        avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
+    String description = describeTransform(
+        transform,
+        allInputs,
+        userGraphContext.getOutputs());
 
-        DoFnExecutor executor;
-        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-        if (signature.stateDeclarations().size() > 0
-                || signature.timerDeclarations().size() > 0) {
-            executor = new StatefulDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    (DoFn<KV, OutputT>) transform.getFn(),
-                    (Coder) WindowedValue.getFullCoder(
-                            input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<KV>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags);
-        } else {
-            executor = new DoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    transform.getFn(),
-                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<InputT>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags);
-        }
+    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+    for (PCollectionView pCollectionView : transform.getSideInputs()) {
+      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+    }
 
-        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+    DoFnExecutor executor;
+    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+    if (signature.stateDeclarations().size() > 0
+        || signature.timerDeclarations().size() > 0) {
+      executor = new StatefulDoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          (DoFn<KV, OutputT>) transform.getFn(),
+          (Coder) WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<KV>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags);
+    } else {
+      executor = new DoFnExecutor<>(
+          userGraphContext.getStepName(),
+          description,
+          userGraphContext.getOptions(),
+          transform.getFn(),
+          WindowedValue.getFullCoder(
+              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+          input.getWindowingStrategy(),
+          (TupleTag<InputT>) inputTag,
+          transform.getSideInputs(),
+          sideInputTagToView.build(),
+          mainOutputTag,
+          sideOutputTags);
     }
+
+    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
index 1ef1ec3..c450a22 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
@@ -19,6 +19,6 @@ package org.apache.beam.runners.jstorm.translation.translator;
 
 import org.apache.beam.sdk.transforms.Reshuffle;
 
-public class ReshuffleTranslator<K, V> extends TransformTranslator.Default<Reshuffle<K,V>> {
-    
+public class ReshuffleTranslator<K, V> extends TransformTranslator.Default<Reshuffle<K, V>> {
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
index 5b5a8e2..a15a8ba 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
@@ -17,76 +17,79 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
-import com.google.auto.value.AutoValue;
-
-import javax.annotation.Nullable;
-import java.util.List;
-
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
+import com.google.auto.value.AutoValue;
+import java.util.List;
+import javax.annotation.Nullable;
+
 /**
  * Class that defines the stream connection between upstream and downstream components.
  */
 @AutoValue
 public abstract class Stream {
 
-    public abstract Producer getProducer();
-    public abstract Consumer getConsumer();
+  public abstract Producer getProducer();
 
-    public static Stream of(Producer producer, Consumer consumer) {
-        return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream(
-            producer, consumer);
+  public abstract Consumer getConsumer();
+
+  public static Stream of(Producer producer, Consumer consumer) {
+    return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream(
+        producer, consumer);
+  }
+
+  @AutoValue
+  public abstract static class Producer {
+    public abstract String getComponentId();
+
+    public abstract String getStreamId();
+
+    public abstract String getStreamName();
+
+    public static Producer of(String componentId, String streamId, String streamName) {
+      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Producer(
+          componentId, streamId, streamName);
     }
+  }
 
-    @AutoValue
-    public abstract static class Producer {
-        public abstract String getComponentId();
-        public abstract String getStreamId();
-        public abstract String getStreamName();
+  @AutoValue
+  public abstract static class Consumer {
+    public abstract String getComponentId();
 
-        public static Producer of(String componentId, String streamId, String streamName) {
-            return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Producer(
-                    componentId, streamId, streamName);
-        }
+    public abstract Grouping getGrouping();
+
+    public static Consumer of(String componentId, Grouping grouping) {
+      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Consumer(
+          componentId, grouping);
     }
+  }
+
+  @AutoValue
+  public abstract static class Grouping {
+    public abstract Type getType();
 
-    @AutoValue
-    public abstract static class Consumer {
-        public abstract String getComponentId();
-        public abstract Grouping getGrouping();
+    @Nullable
+    public abstract List<String> getFields();
+
+    public static Grouping of(Type type) {
+      checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
+      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
+          type, null /* fields */);
+    }
 
-        public static Consumer of(String componentId, Grouping grouping) {
-            return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Consumer(
-                    componentId, grouping);
-        }
+    public static Grouping byFields(List<String> fields) {
+      checkNotNull(fields, "fields");
+      checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
+      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
+          Type.FIELDS, fields);
     }
 
-    @AutoValue
-    public abstract static class Grouping {
-        public abstract Type getType();
-
-        @Nullable
-        public abstract List<String> getFields();
-
-        public static Grouping of(Type type) {
-            checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
-            return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
-                    type, null /* fields */);
-        }
-
-        public static Grouping byFields(List<String> fields) {
-            checkNotNull(fields, "fields");
-            checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
-            return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
-                    Type.FIELDS, fields);
-        }
-
-        /**
-         * Types of stream groupings Storm allows
-         */
-        public enum Type {
-            ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
-        }
+    /**
+     * Types of stream groupings Storm allows
+     */
+    public enum Type {
+      ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
index bebdf7b..487cac0 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
@@ -20,57 +20,57 @@ package org.apache.beam.runners.jstorm.translation.translator;
 import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.collect.FluentIterable;
+import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.sdk.transforms.PTransform;
-
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
 
-import java.util.Map;
-
 /**
  * Interface for classes capable of tranforming Beam PTransforms into Storm primitives.
  */
 public interface TransformTranslator<T extends PTransform<?, ?>> {
 
-    void translateNode(T transform, TranslationContext context);
+  void translateNode(T transform, TranslationContext context);
 
-    /**
-     * Returns true if this translator can translate the given transform.
-     */
-    boolean canTranslate(T transform, TranslationContext context);
+  /**
+   * Returns true if this translator can translate the given transform.
+   */
+  boolean canTranslate(T transform, TranslationContext context);
 
-    class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
-        @Override
-        public void translateNode(T1 transform, TranslationContext context) {
+  class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
+    @Override
+    public void translateNode(T1 transform, TranslationContext context) {
 
-        }
+    }
 
-        @Override
-        public boolean canTranslate(T1 transform, TranslationContext context) {
-            return true;
-        }
+    @Override
+    public boolean canTranslate(T1 transform, TranslationContext context) {
+      return true;
+    }
 
-        static String describeTransform(
-                PTransform<?, ?> transform,
-                Map<TupleTag<?>, PValue> inputs,
-                Map<TupleTag<?>, PValue> outputs) {
-            return String.format("%s --> %s --> %s",
-                    Joiner.on('+').join(FluentIterable.from(inputs.entrySet())
-                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                                @Override
-                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
-                                    return taggedPValue.getKey().getId();
-                                    // return taggedPValue.getValue().getName();
-                                }})),
-                    transform.getName(),
-                    Joiner.on('+').join(FluentIterable.from(outputs.entrySet())
-                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                                @Override
-                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
-                                    return taggedPvalue.getKey().getId();
-                                    //return taggedPValue.getValue().getName();
-                                }})));
-        }
+    static String describeTransform(
+        PTransform<?, ?> transform,
+        Map<TupleTag<?>, PValue> inputs,
+        Map<TupleTag<?>, PValue> outputs) {
+      return String.format("%s --> %s --> %s",
+          Joiner.on('+').join(FluentIterable.from(inputs.entrySet())
+              .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
+                @Override
+                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
+                  return taggedPValue.getKey().getId();
+                  // return taggedPValue.getValue().getName();
+                }
+              })),
+          transform.getName(),
+          Joiner.on('+').join(FluentIterable.from(outputs.entrySet())
+              .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
+                @Override
+                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
+                  return taggedPvalue.getKey().getId();
+                  //return taggedPValue.getValue().getName();
+                }
+              })));
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
index ac7d7bd..33ac024 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
@@ -17,30 +17,30 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TaggedPValue;
 import org.apache.beam.sdk.values.TupleTag;
 
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-
 /**
  * Translates a Read.Unbounded into a Storm spout.
- * 
+ *
  * @param <T>
  */
 public class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbounded<T>> {
-    public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+  public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
 
-        TupleTag<?> tag = userGraphContext.getOutputTag();
-        PValue output = userGraphContext.getOutput();
+    TupleTag<?> tag = userGraphContext.getOutputTag();
+    PValue output = userGraphContext.getOutput();
 
-        UnboundedSourceSpout spout = new UnboundedSourceSpout(
-                description,
-                transform.getSource(), userGraphContext.getOptions(), tag);
-        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
-    }
+    UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        description,
+        transform.getSource(), userGraphContext.getOptions(), tag);
+    context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
index 0ebf837..c55c8d6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
@@ -17,6 +17,10 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.ViewExecutor;
 import org.apache.beam.sdk.coders.Coder;
@@ -33,342 +37,342 @@ import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.PCollectionViews;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
 /**
  * A {@link TransformTranslator} for executing {@link View Views} in JStorm runner.
  */
-public class ViewTranslator extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
-    @Override
-    public void translateNode(CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-        ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
-        context.addTransformExecutor(viewExecutor);
+public class ViewTranslator
+    extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
+  @Override
+  public void translateNode(
+      CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description = describeTransform(
+        transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+    ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
+    context.addTransformExecutor(viewExecutor);
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}
+   * for the Flink runner in streaming mode.
+   */
+  public static class ViewAsMap<K, V>
+      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
+
+    @SuppressWarnings("unused") // used via reflection in JstormRunner#apply()
+    public ViewAsMap(View.AsMap<K, V> transform) {
     }
 
-    /**
-     * Specialized implementation for
-     * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}
-     * for the Flink runner in streaming mode.
-     */
-    public static class ViewAsMap<K, V>
-            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
-
-        @SuppressWarnings("unused") // used via reflection in JstormRunner#apply()
-        public ViewAsMap(View.AsMap<K, V> transform) {
-        }
-
-        @Override
-        public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
-            PCollectionView<Map<K, V>> view =
-                    PCollectionViews.mapView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            @SuppressWarnings({"rawtypes", "unchecked"})
-            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-            try {
-                inputCoder.getKeyCoder().verifyDeterministic();
-            } catch (Coder.NonDeterministicException e) {
-                // TODO: log warning as other runners.
-            }
-
-            return input
-                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
-        }
+    @Override
+    public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
+      PCollectionView<Map<K, V>> view =
+          PCollectionViews.mapView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      @SuppressWarnings({"rawtypes", "unchecked"})
+      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+      try {
+        inputCoder.getKeyCoder().verifyDeterministic();
+      } catch (Coder.NonDeterministicException e) {
+        // TODO: log warning as other runners.
+      }
+
+      return input
+          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
+    }
 
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsMap";
-        }
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsMap";
     }
+  }
+
+  /**
+   * Specialized expansion for {@link
+   * View.AsMultimap View.AsMultimap} for the
+   * Flink runner in streaming mode.
+   */
+  public static class ViewAsMultimap<K, V>
+      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
 
     /**
-     * Specialized expansion for {@link
-     * View.AsMultimap View.AsMultimap} for the
-     * Flink runner in streaming mode.
+     * Builds an instance of this class from the overridden transform.
      */
-    public static class ViewAsMultimap<K, V>
-            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsMultimap(View.AsMultimap<K, V> transform) {
-        }
-
-        @Override
-        public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
-            PCollectionView<Map<K, Iterable<V>>> view =
-                    PCollectionViews.multimapView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            @SuppressWarnings({"rawtypes", "unchecked"})
-            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-            try {
-                inputCoder.getKeyCoder().verifyDeterministic();
-            } catch (Coder.NonDeterministicException e) {
-                // TODO: log warning as other runners.
-            }
-
-            return input
-                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
-        }
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsMultimap(View.AsMultimap<K, V> transform) {
+    }
 
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsMultimap";
-        }
+    @Override
+    public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
+      PCollectionView<Map<K, Iterable<V>>> view =
+          PCollectionViews.multimapView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      @SuppressWarnings({"rawtypes", "unchecked"})
+      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+      try {
+        inputCoder.getKeyCoder().verifyDeterministic();
+      } catch (Coder.NonDeterministicException e) {
+        // TODO: log warning as other runners.
+      }
+
+      return input
+          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
     }
 
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsMultimap";
+    }
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link View.AsList View.AsList} for the
+   * JStorm runner in streaming mode.
+   */
+  public static class ViewAsList<T>
+      extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
     /**
-     * Specialized implementation for
-     * {@link View.AsList View.AsList} for the
-     * JStorm runner in streaming mode.
+     * Builds an instance of this class from the overridden transform.
      */
-    public static class ViewAsList<T>
-            extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsList(View.AsList<T> transform) {}
-
-        @Override
-        public PCollectionView<List<T>> expand(PCollection<T> input) {
-            PCollectionView<List<T>> view =
-                    PCollectionViews.listView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
-        }
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsList(View.AsList<T> transform) {
+    }
 
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsList";
-        }
+    @Override
+    public PCollectionView<List<T>> expand(PCollection<T> input) {
+      PCollectionView<List<T>> view =
+          PCollectionViews.listView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
     }
 
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsList";
+    }
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link View.AsIterable View.AsIterable} for the
+   * JStorm runner in streaming mode.
+   */
+  public static class ViewAsIterable<T>
+      extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
     /**
-     * Specialized implementation for
-     * {@link View.AsIterable View.AsIterable} for the
-     * JStorm runner in streaming mode.
+     * Builds an instance of this class from the overridden transform.
      */
-    public static class ViewAsIterable<T>
-            extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsIterable(View.AsIterable<T> transform) { }
-
-        @Override
-        public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
-            PCollectionView<Iterable<T>> view =
-                    PCollectionViews.iterableView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsIterable";
-        }
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsIterable(View.AsIterable<T> transform) {
     }
 
-    /**
-     * Specialized expansion for
-     * {@link View.AsSingleton View.AsSingleton} for the
-     * JStorm runner in streaming mode.
-     */
-    public static class ViewAsSingleton<T>
-            extends PTransform<PCollection<T>, PCollectionView<T>> {
-        private View.AsSingleton<T> transform;
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsSingleton(View.AsSingleton<T> transform) {
-            this.transform = transform;
-        }
+    @Override
+    public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
+      PCollectionView<Iterable<T>> view =
+          PCollectionViews.iterableView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
+    }
 
-        @Override
-        public PCollectionView<T> expand(PCollection<T> input) {
-            Combine.Globally<T, T> combine = Combine.globally(
-                    new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
-            if (!transform.hasDefaultValue()) {
-                combine = combine.withoutDefaults();
-            }
-            return input.apply(combine.asSingletonView());
-        }
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsIterable";
+    }
+  }
 
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsSingleton";
-        }
+  /**
+   * Specialized expansion for
+   * {@link View.AsSingleton View.AsSingleton} for the
+   * JStorm runner in streaming mode.
+   */
+  public static class ViewAsSingleton<T>
+      extends PTransform<PCollection<T>, PCollectionView<T>> {
+    private View.AsSingleton<T> transform;
 
-        private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
-            private boolean hasDefaultValue;
-            private T defaultValue;
-
-            SingletonCombine(boolean hasDefaultValue, T defaultValue) {
-                this.hasDefaultValue = hasDefaultValue;
-                this.defaultValue = defaultValue;
-            }
-
-            @Override
-            public T apply(T left, T right) {
-                throw new IllegalArgumentException("PCollection with more than one element "
-                        + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
-                        + "combine the PCollection into a single value");
-            }
-
-            @Override
-            public T identity() {
-                if (hasDefaultValue) {
-                    return defaultValue;
-                } else {
-                    throw new IllegalArgumentException(
-                            "Empty PCollection accessed as a singleton view. "
-                                    + "Consider setting withDefault to provide a default value");
-                }
-            }
-        }
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsSingleton(View.AsSingleton<T> transform) {
+      this.transform = transform;
     }
 
-    public static class CombineGloballyAsSingletonView<InputT, OutputT>
-            extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
-        Combine.GloballyAsSingletonView<InputT, OutputT> transform;
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply()
-        public CombineGloballyAsSingletonView(
-                Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
-            this.transform = transform;
-        }
-
-        @Override
-        public PCollectionView<OutputT> expand(PCollection<InputT> input) {
-            PCollection<OutputT> combined =
-                    input.apply(Combine.globally(transform.getCombineFn())
-                            .withoutDefaults()
-                            .withFanout(transform.getFanout()));
-
-            PCollectionView<OutputT> view = PCollectionViews.singletonView(
-                    combined,
-                    combined.getWindowingStrategy(),
-                    transform.getInsertDefault(),
-                    transform.getInsertDefault()
-                            ? transform.getCombineFn().defaultValue() : null,
-                    combined.getCoder());
-            return combined
-                    .apply(ParDo.of(new WrapAsList<OutputT>()))
-                    .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
-        }
+    @Override
+    public PCollectionView<T> expand(PCollection<T> input) {
+      Combine.Globally<T, T> combine = Combine.globally(
+          new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
+      if (!transform.hasDefaultValue()) {
+        combine = combine.withoutDefaults();
+      }
+      return input.apply(combine.asSingletonView());
+    }
 
-        @Override
-        protected String getKindString() {
-            return "StreamingCombineGloballyAsSingletonView";
-        }
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsSingleton";
     }
 
-    private static class WrapAsList<T> extends DoFn<T, List<T>> {
-        @ProcessElement
-        public void processElement(ProcessContext c) {
-            c.output(Collections.singletonList(c.element()));
+    private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
+      private boolean hasDefaultValue;
+      private T defaultValue;
+
+      SingletonCombine(boolean hasDefaultValue, T defaultValue) {
+        this.hasDefaultValue = hasDefaultValue;
+        this.defaultValue = defaultValue;
+      }
+
+      @Override
+      public T apply(T left, T right) {
+        throw new IllegalArgumentException("PCollection with more than one element "
+            + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
+            + "combine the PCollection into a single value");
+      }
+
+      @Override
+      public T identity() {
+        if (hasDefaultValue) {
+          return defaultValue;
+        } else {
+          throw new IllegalArgumentException(
+              "Empty PCollection accessed as a singleton view. "
+                  + "Consider setting withDefault to provide a default value");
         }
+      }
     }
+  }
+
+  public static class CombineGloballyAsSingletonView<InputT, OutputT>
+      extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
+    Combine.GloballyAsSingletonView<InputT, OutputT> transform;
 
     /**
-     * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
-     * They require the input {@link PCollection} fits in memory.
-     * For a large {@link PCollection} this is expected to crash!
-     *
-     * @param <T> the type of elements to concatenate.
+     * Builds an instance of this class from the overridden transform.
      */
-    private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
-        private static final long serialVersionUID = 1L;
+    @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply()
+    public CombineGloballyAsSingletonView(
+        Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
+      this.transform = transform;
+    }
 
-        @Override
-        public List<T> createAccumulator() {
-            return new ArrayList<>();
-        }
+    @Override
+    public PCollectionView<OutputT> expand(PCollection<InputT> input) {
+      PCollection<OutputT> combined =
+          input.apply(Combine.globally(transform.getCombineFn())
+              .withoutDefaults()
+              .withFanout(transform.getFanout()));
+
+      PCollectionView<OutputT> view = PCollectionViews.singletonView(
+          combined,
+          combined.getWindowingStrategy(),
+          transform.getInsertDefault(),
+          transform.getInsertDefault()
+              ? transform.getCombineFn().defaultValue() : null,
+          combined.getCoder());
+      return combined
+          .apply(ParDo.of(new WrapAsList<OutputT>()))
+          .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
+    }
 
-        @Override
-        public List<T> addInput(List<T> accumulator, T input) {
-            accumulator.add(input);
-            return accumulator;
-        }
+    @Override
+    protected String getKindString() {
+      return "StreamingCombineGloballyAsSingletonView";
+    }
+  }
 
-        @Override
-        public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
-            List<T> result = createAccumulator();
-            for (List<T> accumulator : accumulators) {
-                result.addAll(accumulator);
-            }
-            return result;
-        }
+  private static class WrapAsList<T> extends DoFn<T, List<T>> {
+    @ProcessElement
+    public void processElement(ProcessContext c) {
+      c.output(Collections.singletonList(c.element()));
+    }
+  }
+
+  /**
+   * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
+   * They require the input {@link PCollection} fits in memory.
+   * For a large {@link PCollection} this is expected to crash!
+   *
+   * @param <T> the type of elements to concatenate.
+   */
+  private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
+    private static final long serialVersionUID = 1L;
 
-        @Override
-        public List<T> extractOutput(List<T> accumulator) {
-            return accumulator;
-        }
+    @Override
+    public List<T> createAccumulator() {
+      return new ArrayList<>();
+    }
 
-        @Override
-        public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
-            return ListCoder.of(inputCoder);
-        }
+    @Override
+    public List<T> addInput(List<T> accumulator, T input) {
+      accumulator.add(input);
+      return accumulator;
+    }
 
-        @Override
-        public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
-            return ListCoder.of(inputCoder);
-        }
+    @Override
+    public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
+      List<T> result = createAccumulator();
+      for (List<T> accumulator : accumulators) {
+        result.addAll(accumulator);
+      }
+      return result;
     }
 
-    /**
-     * Creates a primitive {@link PCollectionView}.
-     *
-     * <p>For internal use only by runner implementors.
-     *
-     * @param <ElemT> The type of the elements of the input PCollection
-     * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
-     */
-    public static class CreateJStormPCollectionView<ElemT, ViewT>
-            extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
-        private PCollectionView<ViewT> view;
+    @Override
+    public List<T> extractOutput(List<T> accumulator) {
+      return accumulator;
+    }
 
-        private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
-            this.view = view;
-        }
+    @Override
+    public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
+      return ListCoder.of(inputCoder);
+    }
 
-        public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
-                PCollectionView<ViewT> view) {
-            return new CreateJStormPCollectionView<>(view);
-        }
+    @Override
+    public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
+      return ListCoder.of(inputCoder);
+    }
+  }
+
+  /**
+   * Creates a primitive {@link PCollectionView}.
+   *
+   * <p>For internal use only by runner implementors.
+   *
+   * @param <ElemT> The type of the elements of the input PCollection
+   * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
+   */
+  public static class CreateJStormPCollectionView<ElemT, ViewT>
+      extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
+    private PCollectionView<ViewT> view;
+
+    private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
+      this.view = view;
+    }
 
-        @Override
-        public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
-            return view;
-        }
+    public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
+        PCollectionView<ViewT> view) {
+      return new CreateJStormPCollectionView<>(view);
+    }
+
+    @Override
+    public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
+      return view;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
index 0bf9a49..6de34dd 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
@@ -17,22 +17,22 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.WindowAssignExecutor;
 import org.apache.beam.sdk.transforms.windowing.Window;
 
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-
 public class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
 
-    @Override
-    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-        context.getUserGraphContext().setWindowed();
-        WindowAssignExecutor executor = new WindowAssignExecutor(
-                description,
-                transform.getWindowFn(),
-                userGraphContext.getOutputTag());
-        context.addTransformExecutor(executor);
-    }
+  @Override
+  public void translateNode(Window.Assign<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+    context.getUserGraphContext().setWindowed();
+    WindowAssignExecutor executor = new WindowAssignExecutor(
+        description,
+        transform.getWindowFn(),
+        userGraphContext.getOutputTag());
+    context.addTransformExecutor(executor);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
index b67aff9..c863c9e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
@@ -21,27 +21,27 @@ import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Translates a Window.Bound node into a Storm WindowedBolt
- * 
+ *
  * @param <T>
  */
 public class WindowBoundTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
-    private static final Logger LOG = LoggerFactory.getLogger(WindowBoundTranslator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WindowBoundTranslator.class);
 
-    // Do nothing here currently. The assign of window strategy is included in AssignTranslator.
-    @Override
-    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-        if (transform.getWindowFn() instanceof FixedWindows) {
-            context.getUserGraphContext().setWindowed();
-        } else if (transform.getWindowFn() instanceof SlidingWindows) {
-            context.getUserGraphContext().setWindowed();
-        } else {
-            throw new UnsupportedOperationException("Not supported window type currently: " + transform.getWindowFn());
-        }
+  // Do nothing here currently. The assign of window strategy is included in AssignTranslator.
+  @Override
+  public void translateNode(Window.Assign<T> transform, TranslationContext context) {
+    if (transform.getWindowFn() instanceof FixedWindows) {
+      context.getUserGraphContext().setWindowed();
+    } else if (transform.getWindowFn() instanceof SlidingWindows) {
+      context.getUserGraphContext().setWindowed();
+    } else {
+      throw new UnsupportedOperationException(
+          "Not supported window type currently: " + transform.getWindowFn());
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
index 07a3ad5..596d8b4 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
@@ -18,8 +18,8 @@
 package org.apache.beam.runners.jstorm.translation.util;
 
 public class CommonInstance {
-    public static final String KEY = "Key";
-    public static final String VALUE = "Value";
+  public static final String KEY = "Key";
+  public static final String VALUE = "Value";
 
-    public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
+  public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
index 87562fd..750095e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
@@ -17,30 +17,29 @@
  */
 package org.apache.beam.runners.jstorm.translation.util;
 
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import java.io.Serializable;
+import javax.annotation.Nullable;
 import org.apache.beam.runners.core.SideInputReader;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.values.PCollectionView;
 
-import javax.annotation.Nullable;
-import java.io.Serializable;
-
 /**
  * No-op SideInputReader implementation.
  */
 public class DefaultSideInputReader implements SideInputReader, Serializable {
-    @Nullable
-    @Override
-    public <T> T get(PCollectionView<T> pCollectionView, BoundedWindow boundedWindow) {
-        return null;
-    }
+  @Nullable
+  @Override
+  public <T> T get(PCollectionView<T> pCollectionView, BoundedWindow boundedWindow) {
+    return null;
+  }
 
-    @Override
-    public <T> boolean contains(PCollectionView<T> pCollectionView) {
-        return false;
-    }
+  @Override
+  public <T> boolean contains(PCollectionView<T> pCollectionView) {
+    return false;
+  }
 
-    @Override
-    public boolean isEmpty() {
-        return true;
-    }
+  @Override
+  public boolean isEmpty() {
+    return true;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
index 481b7fb..4eb1d8f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
@@ -17,73 +17,74 @@
  */
 package org.apache.beam.runners.jstorm.translation.util;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.IOException;
 import org.apache.beam.runners.core.ExecutionContext;
-import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * Default StepContext for running DoFn This does not allow accessing state or timer internals.
  */
 public class DefaultStepContext implements ExecutionContext.StepContext {
 
-    private TimerInternals timerInternals;
+  private TimerInternals timerInternals;
 
-    private StateInternals stateInternals;
+  private StateInternals stateInternals;
 
-    public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
-        this.timerInternals = checkNotNull(timerInternals, "timerInternals");
-        this.stateInternals = checkNotNull(stateInternals, "stateInternals");
-    }
+  public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
+    this.timerInternals = checkNotNull(timerInternals, "timerInternals");
+    this.stateInternals = checkNotNull(stateInternals, "stateInternals");
+  }
 
-    @Override
-    public String getStepName() {
-        return null;
-    }
+  @Override
+  public String getStepName() {
+    return null;
+  }
 
-    @Override
-    public String getTransformName() {
-        return null;
-    }
+  @Override
+  public String getTransformName() {
+    return null;
+  }
 
-    @Override
-    public void noteOutput(WindowedValue<?> windowedValue) {
+  @Override
+  public void noteOutput(WindowedValue<?> windowedValue) {
 
-    }
+  }
 
-    @Override
-    public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
+  @Override
+  public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
 
-    }
+  }
 
-    @Override
-    public <T, W extends BoundedWindow> void writePCollectionViewData(TupleTag<?> tag, Iterable<WindowedValue<T>> data,
-            Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder) throws IOException {
-        throw new UnsupportedOperationException("Writing side-input data is not supported.");
-    }
+  @Override
+  public <T, W extends BoundedWindow> void writePCollectionViewData(
+      TupleTag<?> tag, Iterable<WindowedValue<T>> data,
+      Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder)
+      throws IOException {
+    throw new UnsupportedOperationException("Writing side-input data is not supported.");
+  }
 
-    @Override
-    public StateInternals stateInternals() {
-        return stateInternals;
-    }
+  @Override
+  public StateInternals stateInternals() {
+    return stateInternals;
+  }
 
-    @Override
-    public TimerInternals timerInternals() {
-        return timerInternals;
-    }
+  @Override
+  public TimerInternals timerInternals() {
+    return timerInternals;
+  }
 
-    public void setStateInternals(StateInternals stateInternals) {
-        this.stateInternals = stateInternals;
-    }
+  public void setStateInternals(StateInternals stateInternals) {
+    this.stateInternals = stateInternals;
+  }
 
-    public void setTimerInternals(TimerInternals timerInternals) {
-        this.timerInternals = timerInternals;
-    }
+  public void setTimerInternals(TimerInternals timerInternals) {
+    this.timerInternals = timerInternals;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
index cbf815a..9fd62e4 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
@@ -17,37 +17,37 @@
  */
 package org.apache.beam.runners.jstorm.util;
 
+import org.apache.beam.runners.core.KeyedWorkItem;
 import org.apache.beam.runners.jstorm.translation.runtime.Executor;
-
 import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
 import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
 import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
-import org.apache.beam.runners.core.KeyedWorkItem;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 
 public class RunnerUtils {
-    /**
-     * Convert WindowedValue<KV<>> into KeyedWorkItem<K, WindowedValue<V>>
-     * @param elem
-     * @return
-     */
-    public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
-        WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
-        SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
-                kvElem.getValue().getKey(),
-                kvElem.withValue(kvElem.getValue().getValue()));
-        return workItem;
-    }
+  /**
+   * Converts a {@code WindowedValue<KV<K, V>>} into a {@code KeyedWorkItem<K, WindowedValue<V>>}.
+   *
+   * @param elem the windowed key-value element to convert
+   * @return a singleton {@code KeyedWorkItem} keyed by the element's key
+   */
+  public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
+    WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
+    SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
+        kvElem.getValue().getKey(),
+        kvElem.withValue(kvElem.getValue().getValue()));
+    return workItem;
+  }
 
-    public static boolean isGroupByKeyExecutor (Executor executor) {
-        if (executor instanceof GroupByWindowExecutor) {
-            return true;
-        } else if (executor instanceof StatefulDoFnExecutor ||
-                executor instanceof MultiStatefulDoFnExecutor) {
-            return true;
-        } else {
-            return false;
-        }
+  public static boolean isGroupByKeyExecutor(Executor executor) {
+    if (executor instanceof GroupByWindowExecutor) {
+      return true;
+    } else if (executor instanceof StatefulDoFnExecutor ||
+        executor instanceof MultiStatefulDoFnExecutor) {
+      return true;
+    } else {
+      return false;
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
index 391699b..182794f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
@@ -18,47 +18,48 @@
 
 package org.apache.beam.runners.jstorm.util;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.beam.sdk.options.PipelineOptions;
+import static com.google.common.base.Preconditions.checkNotNull;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
-
-import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.beam.sdk.options.PipelineOptions;
 
 /**
  * Encapsulates the PipelineOptions in serialized form to ship them to the cluster.
  */
 public class SerializedPipelineOptions implements Serializable {
 
-    private final byte[] serializedOptions;
+  private final byte[] serializedOptions;
 
-    /** Lazily initialized copy of deserialized options */
-    private transient PipelineOptions pipelineOptions;
+  /**
+   * Lazily initialized copy of deserialized options
+   */
+  private transient PipelineOptions pipelineOptions;
 
-    public SerializedPipelineOptions(PipelineOptions options) {
-        checkNotNull(options, "PipelineOptions must not be null.");
-
-        try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
-            new ObjectMapper().writeValue(baos, options);
-            this.serializedOptions = baos.toByteArray();
-        } catch (Exception e) {
-            throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
-        }
+  public SerializedPipelineOptions(PipelineOptions options) {
+    checkNotNull(options, "PipelineOptions must not be null.");
 
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      new ObjectMapper().writeValue(baos, options);
+      this.serializedOptions = baos.toByteArray();
+    } catch (Exception e) {
+      throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
     }
 
-    public PipelineOptions getPipelineOptions() {
-        if (pipelineOptions == null) {
-            try {
-                pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
-            } catch (IOException e) {
-                throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
-            }
-        }
+  }
 
-        return pipelineOptions;
+  public PipelineOptions getPipelineOptions() {
+    if (pipelineOptions == null) {
+      try {
+        pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
+      } catch (IOException e) {
+        throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
+      }
     }
 
+    return pipelineOptions;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
index dee5f1a..cce21b3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
@@ -24,6 +24,7 @@ import org.apache.beam.sdk.util.WindowedValue;
 
 /**
  * Singleton keyed word item.
+ *
  * @param <K>
  * @param <ElemT>
  */
@@ -38,7 +39,7 @@ public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT>
   }
 
   public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
-      return new SingletonKeyedWorkItem<K, ElemT>(key, value);
+    return new SingletonKeyedWorkItem<K, ElemT>(key, value);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrarTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrarTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrarTest.java
index 344d3c7..0d6fc23 100644
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrarTest.java
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrarTest.java
@@ -34,7 +34,7 @@ public class JStormRunnerRegistrarTest {
   @Test
   public void testFullName() {
     String[] args =
-        new String[] {String.format("--runner=%s", JStormRunner.class.getName())};
+        new String[]{String.format("--runner=%s", JStormRunner.class.getName())};
     PipelineOptions opts = PipelineOptionsFactory.fromArgs(args).create();
     assertEquals(opts.getRunner(), JStormRunner.class);
   }
@@ -42,7 +42,7 @@ public class JStormRunnerRegistrarTest {
   @Test
   public void testClassName() {
     String[] args =
-        new String[] {String.format("--runner=%s", JStormRunner.class.getSimpleName())};
+        new String[]{String.format("--runner=%s", JStormRunner.class.getSimpleName())};
     PipelineOptions opts = PipelineOptionsFactory.fromArgs(args).create();
     assertEquals(opts.getRunner(), JStormRunner.class);
   }


[14/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
index d907fac..6d6f1c6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
@@ -17,15 +17,17 @@
  */
 package org.apache.beam.runners.jstorm.translation;
 
-import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
+import java.util.List;
 import org.apache.beam.runners.core.construction.PTransformMatchers;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
+import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
 import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.runners.PTransformOverride;
 import org.apache.beam.sdk.runners.TransformHierarchy;
-import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.View;
@@ -34,144 +36,151 @@ import org.apache.beam.sdk.values.PValue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
-
-import java.util.List;
-
 /**
  * Pipleline translator of Storm
  */
 public class StormPipelineTranslator extends Pipeline.PipelineVisitor.Defaults {
-    private static final Logger LOG = LoggerFactory.getLogger(StormPipelineTranslator.class);
-    private TranslationContext context;
-    private int depth = 0;
-
-    public StormPipelineTranslator(TranslationContext context) {
-        this.context = context;
-    }
-
-    public void translate(Pipeline pipeline) {
-        List<PTransformOverride> transformOverrides =
-                ImmutableList.<PTransformOverride>builder()
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
-                               new ReflectiveOneToOneOverrideFactory((ViewTranslator.CombineGloballyAsSingletonView.class))))
-                        .build();
-        pipeline.replaceAll(transformOverrides);
-        pipeline.traverseTopologically(this);
-    }
-
-    @Override
-    public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
-        this.depth++;
-
-        // check if current composite transforms need to be translated. 
-        // If not, all sub transforms will be translated in visitPrimitiveTransform.
-        PTransform<?, ?> transform = node.getTransform();
-        if (transform != null) {
-            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-
-            if (translator != null && applyCanTranslate(transform, node, translator)) {
-                applyStreamingTransform(transform, node, translator);
-                LOG.info(genSpaces(this.depth) + "translated-" + node);
-                return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
-            }
-        }
-        return CompositeBehavior.ENTER_TRANSFORM;
-    }
-
-    public void leaveCompositeTransform(TransformHierarchy.Node node) {
-        this.depth--;
-        LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
+  private static final Logger LOG = LoggerFactory.getLogger(StormPipelineTranslator.class);
+  private TranslationContext context;
+  private int depth = 0;
+
+  public StormPipelineTranslator(TranslationContext context) {
+    this.context = context;
+  }
+
+  public void translate(Pipeline pipeline) {
+    List<PTransformOverride> transformOverrides =
+        ImmutableList.<PTransformOverride>builder()
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
+            .add(PTransformOverride.of(
+                PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
+                new ReflectiveOneToOneOverrideFactory(
+                    (ViewTranslator.CombineGloballyAsSingletonView.class))))
+            .build();
+    pipeline.replaceAll(transformOverrides);
+    pipeline.traverseTopologically(this);
+  }
+
+  @Override
+  public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
+    this.depth++;
+
+    // check if current composite transforms need to be translated.
+    // If not, all sub transforms will be translated in visitPrimitiveTransform.
+    PTransform<?, ?> transform = node.getTransform();
+    if (transform != null) {
+      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+
+      if (translator != null && applyCanTranslate(transform, node, translator)) {
+        applyStreamingTransform(transform, node, translator);
+        LOG.info(genSpaces(this.depth) + "translated-" + node);
+        return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
+      }
     }
-
-    public void visitPrimitiveTransform(TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
-
-        if (!node.isRootNode()) {
-            PTransform<?, ?> transform = node.getTransform();
-            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-            if (translator == null || !applyCanTranslate(transform, node, translator)) {
-                LOG.info(node.getTransform().getClass().toString());
-                throw new UnsupportedOperationException("The transform " + transform + " is currently not supported.");
-            }
-            applyStreamingTransform(transform, node, translator);
-        }
+    return CompositeBehavior.ENTER_TRANSFORM;
+  }
+
+  public void leaveCompositeTransform(TransformHierarchy.Node node) {
+    this.depth--;
+    LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
+  }
+
+  public void visitPrimitiveTransform(TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
+
+    if (!node.isRootNode()) {
+      PTransform<?, ?> transform = node.getTransform();
+      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+      if (translator == null || !applyCanTranslate(transform, node, translator)) {
+        LOG.info(node.getTransform().getClass().toString());
+        throw new UnsupportedOperationException(
+            "The transform " + transform + " is currently not supported.");
+      }
+      applyStreamingTransform(transform, node, translator);
     }
-
-    public void visitValue(PValue value, TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "visiting value {}", value);
+  }
+
+  public void visitValue(PValue value, TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "visiting value {}", value);
+  }
+
+  private <T extends PTransform<?, ?>> void applyStreamingTransform(
+      PTransform<?, ?> transform,
+      TransformHierarchy.Node node,
+      TransformTranslator<?> translator) {
+    @SuppressWarnings("unchecked")
+    T typedTransform = (T) transform;
+    @SuppressWarnings("unchecked")
+    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+    typedTranslator.translateNode(typedTransform, context);
+
+    // Maintain PValue to TupleTag map for side inputs translation.
+    context.getUserGraphContext().recordOutputTaggedPValue();
+  }
+
+  private <T extends PTransform<?, ?>> boolean applyCanTranslate(
+      PTransform<?, ?> transform,
+      TransformHierarchy.Node node,
+      TransformTranslator<?> translator) {
+    @SuppressWarnings("unchecked")
+    T typedTransform = (T) transform;
+    @SuppressWarnings("unchecked")
+    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+
+    return typedTranslator.canTranslate(typedTransform, context);
+  }
+
+  /**
+   * Utility formatting method.
+   *
+   * @param n number of spaces to generate
+   * @return String with "|" followed by n spaces
+   */
+  protected static String genSpaces(int n) {
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < n; i++) {
+      builder.append("|   ");
     }
-
-    private <T extends PTransform<?, ?>> void applyStreamingTransform(PTransform<?, ?> transform, TransformHierarchy.Node node,
-            TransformTranslator<?> translator) {
-        @SuppressWarnings("unchecked")
-        T typedTransform = (T) transform;
-        @SuppressWarnings("unchecked")
-        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-        typedTranslator.translateNode(typedTransform, context);
-
-        // Maintain PValue to TupleTag map for side inputs translation.
-        context.getUserGraphContext().recordOutputTaggedPValue();
+    return builder.toString();
+  }
+
+  private static class ReflectiveOneToOneOverrideFactory<
+      InputT extends PValue,
+      OutputT extends PValue,
+      TransformT extends PTransform<InputT, OutputT>>
+      extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
+    private final Class<PTransform<InputT, OutputT>> replacement;
+
+    private ReflectiveOneToOneOverrideFactory(
+        Class<PTransform<InputT, OutputT>> replacement) {
+      this.replacement = replacement;
     }
 
-    private <T extends PTransform<?, ?>> boolean applyCanTranslate(PTransform<?, ?> transform, TransformHierarchy.Node node, TransformTranslator<?> translator) {
-        @SuppressWarnings("unchecked")
-        T typedTransform = (T) transform;
-        @SuppressWarnings("unchecked")
-        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-
-        return typedTranslator.canTranslate(typedTransform, context);
-    }
-
-    /**
-     * Utility formatting method.
-     * 
-     * @param n number of spaces to generate
-     * @return String with "|" followed by n spaces
-     */
-    protected static String genSpaces(int n) {
-        StringBuilder builder = new StringBuilder();
-        for (int i = 0; i < n; i++) {
-            builder.append("|   ");
-        }
-        return builder.toString();
-    }
-
-    private static class ReflectiveOneToOneOverrideFactory<
-            InputT extends PValue,
-            OutputT extends PValue,
-            TransformT extends PTransform<InputT, OutputT>>
-            extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
-        private final Class<PTransform<InputT, OutputT>> replacement;
-
-        private ReflectiveOneToOneOverrideFactory(
-                Class<PTransform<InputT, OutputT>> replacement) {
-            this.replacement = replacement;
-        }
-
-        @Override
-        public PTransformReplacement<InputT, OutputT> getReplacementTransform(AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
-            PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
-            PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
-                    .withArg((Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(), originalPTransform)
-                    .build();
-            InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
-            return PTransformReplacement.of(inputT, replacedPTransform);
-        }
+    @Override
+    public PTransformReplacement<InputT, OutputT> getReplacementTransform(
+        AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
+      PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
+      PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
+          .withArg(
+              (Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(),
+              originalPTransform)
+          .build();
+      InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
+      return PTransformReplacement.of(inputT, replacedPTransform);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 707202b..526352a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -17,19 +17,29 @@
  */
 package org.apache.beam.runners.jstorm.translation;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
 import avro.shaded.com.google.common.collect.Lists;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.translator.Stream;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
-import com.google.common.base.Strings;
 import com.google.common.base.Function;
 import com.google.common.base.Joiner;
+import com.google.common.base.Strings;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicSpout;
 import org.apache.beam.runners.jstorm.translation.runtime.Executor;
+import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
+import org.apache.beam.runners.jstorm.translation.translator.Stream;
 import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
 import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.PValueBase;
@@ -38,387 +48,392 @@ import org.apache.beam.sdk.values.TupleTag;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
-
-import java.util.*;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-
 /**
  * Maintains the state necessary during Pipeline translation to build a Storm topology.
  */
 public class TranslationContext {
-    private static final Logger LOG = LoggerFactory.getLogger(TranslationContext.class);
-
-    private final UserGraphContext userGraphContext;
-    private final ExecutionGraphContext executionGraphContext;
-
-    public TranslationContext(JStormPipelineOptions options) {
-        this.userGraphContext = new UserGraphContext(options);
-        this.executionGraphContext = new ExecutionGraphContext();
+  private static final Logger LOG = LoggerFactory.getLogger(TranslationContext.class);
+
+  private final UserGraphContext userGraphContext;
+  private final ExecutionGraphContext executionGraphContext;
+
+  public TranslationContext(JStormPipelineOptions options) {
+    this.userGraphContext = new UserGraphContext(options);
+    this.executionGraphContext = new ExecutionGraphContext();
+  }
+
+  public ExecutionGraphContext getExecutionGraphContext() {
+    return executionGraphContext;
+  }
+
+  public UserGraphContext getUserGraphContext() {
+    return userGraphContext;
+  }
+
+  private void addStormStreamDef(
+      TaggedPValue input, String destComponentName, Stream.Grouping grouping) {
+    Stream.Producer producer = executionGraphContext.getProducer(input.getValue());
+    if (!producer.getComponentId().equals(destComponentName)) {
+      Stream.Consumer consumer = Stream.Consumer.of(destComponentName, grouping);
+      executionGraphContext.registerStreamConsumer(consumer, producer);
+
+      ExecutorsBolt executorsBolt = executionGraphContext.getBolt(producer.getComponentId());
+      if (executorsBolt != null) {
+        executorsBolt.addExternalOutputTag(input.getTag());
+      }
     }
-
-    public ExecutionGraphContext getExecutionGraphContext() {
-        return executionGraphContext;
+  }
+
+  private String getUpstreamExecutorsBolt() {
+    for (PValue value : userGraphContext.getInputs().values()) {
+      String componentId = executionGraphContext.getProducerComponentId(value);
+      if (componentId != null && executionGraphContext.getBolt(componentId) != null) {
+        return componentId;
+      }
     }
-
-    public UserGraphContext getUserGraphContext() {
-        return userGraphContext;
+    // When upstream component is spout, "null" will be return.
+    return null;
+  }
+
+  /**
+   * check if the current transform is applied to source collection.
+   *
+   * @return
+   */
+  private boolean connectedToSource() {
+    for (PValue value : userGraphContext.getInputs().values()) {
+      if (executionGraphContext.producedBySpout(value)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * @param upstreamExecutorsBolt
+   * @return true if there is multiple input streams, or upstream executor output the same stream
+   * to different executors
+   */
+  private boolean isMultipleInputOrOutput(
+      ExecutorsBolt upstreamExecutorsBolt, Map<TupleTag<?>, PValue> inputs) {
+    if (inputs.size() > 1) {
+      return true;
+    } else {
+      final Sets.SetView<TupleTag> intersection =
+          Sets.intersection(upstreamExecutorsBolt.getExecutors().keySet(), inputs.keySet());
+      if (!intersection.isEmpty()) {
+        // there is already a different executor consume the same input
+        return true;
+      } else {
+        return false;
+      }
     }
+  }
 
-    private void addStormStreamDef(TaggedPValue input, String destComponentName, Stream.Grouping grouping) {
-        Stream.Producer producer = executionGraphContext.getProducer(input.getValue());
-        if (!producer.getComponentId().equals(destComponentName)) {
-            Stream.Consumer consumer = Stream.Consumer.of(destComponentName, grouping);
-            executionGraphContext.registerStreamConsumer(consumer, producer);
+  public void addTransformExecutor(Executor executor) {
+    addTransformExecutor(executor, Collections.EMPTY_LIST);
+  }
 
-            ExecutorsBolt executorsBolt = executionGraphContext.getBolt(producer.getComponentId());
-            if (executorsBolt != null) {
-                executorsBolt.addExternalOutputTag(input.getTag());
-            }
-        }
-    }
+  public void addTransformExecutor(Executor executor, List<PValue> sideInputs) {
+    addTransformExecutor(
+        executor, userGraphContext.getInputs(), userGraphContext.getOutputs(), sideInputs);
+  }
 
-    private String getUpstreamExecutorsBolt() {
-        for (PValue value : userGraphContext.getInputs().values()) {
-            String componentId = executionGraphContext.getProducerComponentId(value);
-            if (componentId != null && executionGraphContext.getBolt(componentId) != null) {
-                return componentId;
-            }
-        }
-        // When upstream component is spout, "null" will be return.
-        return null;
-    }
+  public void addTransformExecutor(
+      Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs) {
+    addTransformExecutor(executor, inputs, outputs, Collections.EMPTY_LIST);
+  }
 
-    /**
-     * check if the current transform is applied to source collection.
-     * @return
-     */
-    private boolean connectedToSource() {
-        for (PValue value : userGraphContext.getInputs().values()) {
-            if (executionGraphContext.producedBySpout(value)) {
-                return true;
-            }
-        }
-        return false;
-    }
+  public void addTransformExecutor(
+      Executor executor,
+      Map<TupleTag<?>, PValue> inputs,
+      Map<TupleTag<?>, PValue> outputs,
+      List<PValue> sideInputs) {
+    String name = null;
 
+    ExecutorsBolt bolt = null;
+
+    boolean isGBK = false;
     /**
-     * @param upstreamExecutorsBolt
-     * @return true if there is multiple input streams, or upstream executor output the same stream
-     *          to different executors
+     * Check if the transform executor needs to be chained into an existing ExecutorsBolt.
+     * For following cases, a new bolt is created for the specified executor, otherwise the executor
+     * will be added into the bolt contains corresponding upstream executor.
+     * a) it is a GroupByKey executor
+     * b) it is connected to source directly
+     * c) None existing upstream bolt was found
+     * d) For the purpose of performance to reduce the side effects between multiple streams which
+     *    is output to same executor, a new bolt will be created.
      */
-    private boolean isMultipleInputOrOutput(ExecutorsBolt upstreamExecutorsBolt, Map<TupleTag<?>, PValue> inputs) {
-        if (inputs.size() > 1) {
-            return true;
-        } else {
-            final Sets.SetView<TupleTag> intersection = Sets.intersection(upstreamExecutorsBolt.getExecutors().keySet(), inputs.keySet());
-            if (!intersection.isEmpty()) {
-                // there is already a different executor consume the same input
-                return true;
-            } else {
-                return false;
-            }
-        }
+    if (RunnerUtils.isGroupByKeyExecutor(executor)) {
+      bolt = new ExecutorsBolt();
+      name = executionGraphContext.registerBolt(bolt);
+      isGBK = true;
+    } else if (connectedToSource()) {
+      bolt = new ExecutorsBolt();
+      name = executionGraphContext.registerBolt(bolt);
+    } else {
+      name = getUpstreamExecutorsBolt();
+      if (name == null) {
+        bolt = new ExecutorsBolt();
+        name = executionGraphContext.registerBolt(bolt);
+      } else {
+        bolt = executionGraphContext.getBolt(name);
+        if (isMultipleInputOrOutput(bolt, inputs)) {
+          bolt = new ExecutorsBolt();
+          name = executionGraphContext.registerBolt(bolt);
+        }
+      }
     }
 
-    public void addTransformExecutor(Executor executor) {
-        addTransformExecutor(executor, Collections.EMPTY_LIST);
-    }
-
-    public void addTransformExecutor(Executor executor, List<PValue> sideInputs) {
-        addTransformExecutor(executor, userGraphContext.getInputs(), userGraphContext.getOutputs(), sideInputs);
-    }
-
-    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs) {
-        addTransformExecutor(executor, inputs, outputs, Collections.EMPTY_LIST);
-    }
-
-    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs, List<PValue> sideInputs) {
-        String name = null;
-
-        ExecutorsBolt bolt = null;
-
-        boolean isGBK = false;
-        /**
-         * Check if the transform executor needs to be chained into an existing ExecutorsBolt.
-         * For following cases, a new bolt is created for the specified executor, otherwise the executor
-         * will be added into the bolt contains corresponding upstream executor.
-         * a) it is a GroupByKey executor
-         * b) it is connected to source directly
-         * c) None existing upstream bolt was found
-         * d) For the purpose of performance to reduce the side effects between multiple streams which
-         *    is output to same executor, a new bolt will be created.
-         */
-        if (RunnerUtils.isGroupByKeyExecutor(executor)) {
-            bolt = new ExecutorsBolt();
-            name = executionGraphContext.registerBolt(bolt);
-            isGBK = true;
-        } else if (connectedToSource()) {
-            bolt = new ExecutorsBolt();
-            name = executionGraphContext.registerBolt(bolt);
-        } else {
-            name = getUpstreamExecutorsBolt();
-            if (name == null) {
-                bolt = new ExecutorsBolt();
-                name = executionGraphContext.registerBolt(bolt);
-            } else {
-                bolt = executionGraphContext.getBolt(name);
-                if (isMultipleInputOrOutput(bolt, inputs)) {
-                    bolt = new ExecutorsBolt();
-                    name = executionGraphContext.registerBolt(bolt);
-                }
-            }
-        }
-
-        // update the output tags of current transform into ExecutorsBolt
-        for (Map.Entry<TupleTag<?>, PValue> entry : outputs.entrySet()) {
-            TupleTag tag = entry.getKey();
-            PValue value = entry.getValue();
-
-            // use tag of PValueBase
-            if (value instanceof PValueBase) {
-                tag = ((PValueBase) value).expand().keySet().iterator().next();
-            }
-            executionGraphContext.registerStreamProducer(
-                    TaggedPValue.of(tag, value),
-                    Stream.Producer.of(name, tag.getId(), value.getName()));
-            //bolt.addOutputTags(tag);
-        }
+    // update the output tags of current transform into ExecutorsBolt
+    for (Map.Entry<TupleTag<?>, PValue> entry : outputs.entrySet()) {
+      TupleTag tag = entry.getKey();
+      PValue value = entry.getValue();
+
+      // use tag of PValueBase
+      if (value instanceof PValueBase) {
+        tag = ((PValueBase) value).expand().keySet().iterator().next();
+      }
+      executionGraphContext.registerStreamProducer(
+          TaggedPValue.of(tag, value),
+          Stream.Producer.of(name, tag.getId(), value.getName()));
+      //bolt.addOutputTags(tag);
+    }
 
-        // add the transform executor into the chain of ExecutorsBolt
-        for (Map.Entry<TupleTag<?>, PValue> entry : inputs.entrySet()) {
-            TupleTag tag = entry.getKey();
-            PValue value = entry.getValue();
-            bolt.addExecutor(tag, executor);
-
-            // filter all connections inside bolt
-            //if (!bolt.getOutputTags().contains(tag)) {
-                Stream.Grouping grouping;
-                if (isGBK) {
-                    grouping = Stream.Grouping.byFields(Arrays.asList(CommonInstance.KEY));
-                } else {
-                    grouping = Stream.Grouping.of(Stream.Grouping.Type.LOCAL_OR_SHUFFLE);
-                }
-                addStormStreamDef(TaggedPValue.of(tag, value), name, grouping);
-            //}
-        }
+    // add the transform executor into the chain of ExecutorsBolt
+    for (Map.Entry<TupleTag<?>, PValue> entry : inputs.entrySet()) {
+      TupleTag tag = entry.getKey();
+      PValue value = entry.getValue();
+      bolt.addExecutor(tag, executor);
+
+      // filter all connections inside bolt
+      //if (!bolt.getOutputTags().contains(tag)) {
+      Stream.Grouping grouping;
+      if (isGBK) {
+        grouping = Stream.Grouping.byFields(Arrays.asList(CommonInstance.KEY));
+      } else {
+        grouping = Stream.Grouping.of(Stream.Grouping.Type.LOCAL_OR_SHUFFLE);
+      }
+      addStormStreamDef(TaggedPValue.of(tag, value), name, grouping);
+      //}
+    }
 
-        for (PValue sideInput : sideInputs) {
-            TupleTag tag = userGraphContext.findTupleTag(sideInput);
-            bolt.addExecutor(tag, executor);
-            checkState(!bolt.getOutputTags().contains(tag));
-            addStormStreamDef(TaggedPValue.of(tag, sideInput), name, Stream.Grouping.of(Stream.Grouping.Type.ALL));
-        }
+    for (PValue sideInput : sideInputs) {
+      TupleTag tag = userGraphContext.findTupleTag(sideInput);
+      bolt.addExecutor(tag, executor);
+      checkState(!bolt.getOutputTags().contains(tag));
+      addStormStreamDef(
+          TaggedPValue.of(tag, sideInput), name, Stream.Grouping.of(Stream.Grouping.Type.ALL));
+    }
 
-        bolt.registerExecutor(executor);
+    bolt.registerExecutor(executor);
 
-        // set parallelismNumber
-        String pTransformfullName = userGraphContext.currentTransform.getFullName();
-        String compositeName = pTransformfullName.split("/")[0];
-        Map parallelismNumMap = userGraphContext.getOptions().getParallelismNumMap();
-        if (parallelismNumMap.containsKey(compositeName)) {
-            int configNum = (Integer) parallelismNumMap.get(compositeName);
-            int currNum = bolt.getParallelismNum();
-            bolt.setParallelismNum(Math.max(configNum, currNum));
-        }
+    // set parallelismNumber
+    String pTransformfullName = userGraphContext.currentTransform.getFullName();
+    String compositeName = pTransformfullName.split("/")[0];
+    Map parallelismNumMap = userGraphContext.getOptions().getParallelismNumMap();
+    if (parallelismNumMap.containsKey(compositeName)) {
+      int configNum = (Integer) parallelismNumMap.get(compositeName);
+      int currNum = bolt.getParallelismNum();
+      bolt.setParallelismNum(Math.max(configNum, currNum));
     }
+  }
 
-    // TODO: add getSideInputs() and getSideOutputs().
-    public static class UserGraphContext {
-        private final JStormPipelineOptions options;
-        private final Map<PValue, TupleTag> pValueToTupleTag;
-        private AppliedPTransform<?, ?, ?> currentTransform = null;
+  // TODO: add getSideInputs() and getSideOutputs().
+  public static class UserGraphContext {
+    private final JStormPipelineOptions options;
+    private final Map<PValue, TupleTag> pValueToTupleTag;
+    private AppliedPTransform<?, ?, ?> currentTransform = null;
 
-        private boolean isWindowed = false;
+    private boolean isWindowed = false;
 
-        public UserGraphContext(JStormPipelineOptions options) {
-            this.options = checkNotNull(options, "options");
-            this.pValueToTupleTag = Maps.newHashMap();
-        }
+    public UserGraphContext(JStormPipelineOptions options) {
+      this.options = checkNotNull(options, "options");
+      this.pValueToTupleTag = Maps.newHashMap();
+    }
 
-        public JStormPipelineOptions getOptions() {
-            return this.options;
-        }
+    public JStormPipelineOptions getOptions() {
+      return this.options;
+    }
 
-        public void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
-            this.currentTransform = transform;
-        }
+    public void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
+      this.currentTransform = transform;
+    }
 
-        public String getStepName() {
-            return currentTransform.getFullName();
-        }
+    public String getStepName() {
+      return currentTransform.getFullName();
+    }
 
-        public <T extends PValue> T getInput() {
-            return (T) currentTransform.getInputs().values().iterator().next();
-        }
+    public <T extends PValue> T getInput() {
+      return (T) currentTransform.getInputs().values().iterator().next();
+    }
 
-        public Map<TupleTag<?>, PValue> getInputs() {
-            return currentTransform.getInputs();
-        }
+    public Map<TupleTag<?>, PValue> getInputs() {
+      return currentTransform.getInputs();
+    }
 
-        public TupleTag<?> getInputTag() {
-            return currentTransform.getInputs().keySet().iterator().next();
-        }
+    public TupleTag<?> getInputTag() {
+      return currentTransform.getInputs().keySet().iterator().next();
+    }
 
-        public List<TupleTag<?>> getInputTags() {
-            return Lists.newArrayList(currentTransform.getInputs().keySet());
-        }
+    public List<TupleTag<?>> getInputTags() {
+      return Lists.newArrayList(currentTransform.getInputs().keySet());
+    }
 
-        public <T extends PValue> T getOutput() {
-            return (T) currentTransform.getOutputs().values().iterator().next();
-        }
+    public <T extends PValue> T getOutput() {
+      return (T) currentTransform.getOutputs().values().iterator().next();
+    }
 
-        public Map<TupleTag<?>, PValue> getOutputs() {
-            return currentTransform.getOutputs();
-        }
+    public Map<TupleTag<?>, PValue> getOutputs() {
+      return currentTransform.getOutputs();
+    }
 
-        public TupleTag<?> getOutputTag() {
-            return currentTransform.getOutputs().keySet().iterator().next();
-        }
+    public TupleTag<?> getOutputTag() {
+      return currentTransform.getOutputs().keySet().iterator().next();
+    }
 
-        public List<TupleTag<?>> getOutputTags() {
-            return Lists.newArrayList(currentTransform.getOutputs().keySet());
-        }
+    public List<TupleTag<?>> getOutputTags() {
+      return Lists.newArrayList(currentTransform.getOutputs().keySet());
+    }
 
-        public void recordOutputTaggedPValue() {
-            for (Map.Entry<TupleTag<?>, PValue> entry : getOutputs().entrySet()) {
-                pValueToTupleTag.put(entry.getValue(), entry.getKey());
-            }
-        }
+    public void recordOutputTaggedPValue() {
+      for (Map.Entry<TupleTag<?>, PValue> entry : getOutputs().entrySet()) {
+        pValueToTupleTag.put(entry.getValue(), entry.getKey());
+      }
+    }
 
-        public <T> TupleTag<T> findTupleTag(PValue pValue) {
-            return pValueToTupleTag.get(checkNotNull(pValue, "pValue"));
-        }
+    public <T> TupleTag<T> findTupleTag(PValue pValue) {
+      return pValueToTupleTag.get(checkNotNull(pValue, "pValue"));
+    }
 
-        public void setWindowed() {
-            this.isWindowed = true;
-        }
+    public void setWindowed() {
+      this.isWindowed = true;
+    }
 
-        public boolean isWindowed() {
-            return this.isWindowed;
-        }
+    public boolean isWindowed() {
+      return this.isWindowed;
+    }
 
-        @Override
-        public String toString() {
-            return Joiner.on('\n').join(FluentIterable.from(pValueToTupleTag.entrySet())
-                    .transform(new Function<Map.Entry<PValue,TupleTag>, String>() {
-                        @Override
-                        public String apply(Map.Entry<PValue, TupleTag> entry) {
-                            return String.format("%s == %s", entry.getValue().getId(), entry.getKey().getName());
-                        }}));
-        }
+    @Override
+    public String toString() {
+      return Joiner.on('\n').join(FluentIterable.from(pValueToTupleTag.entrySet())
+          .transform(new Function<Map.Entry<PValue, TupleTag>, String>() {
+            @Override
+            public String apply(Map.Entry<PValue, TupleTag> entry) {
+              return String.format("%s == %s", entry.getValue().getId(), entry.getKey().getName());
+            }
+          }));
     }
+  }
 
-    public static class ExecutionGraphContext {
+  public static class ExecutionGraphContext {
 
-        private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();
-        private final Map<String, ExecutorsBolt> boltMap = new HashMap<>();
+    private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();
+    private final Map<String, ExecutorsBolt> boltMap = new HashMap<>();
 
-        // One-to-one mapping between Stream.Producer and TaggedPValue (or PValue).
-        private final Map<PValue, Stream.Producer> pValueToProducer = new HashMap<>();
-        private final Map<Stream.Producer, TaggedPValue> producerToTaggedPValue = new HashMap<>();
+    // One-to-one mapping between Stream.Producer and TaggedPValue (or PValue).
+    private final Map<PValue, Stream.Producer> pValueToProducer = new HashMap<>();
+    private final Map<Stream.Producer, TaggedPValue> producerToTaggedPValue = new HashMap<>();
 
-        private final List<Stream> streams = new ArrayList<>();
+    private final List<Stream> streams = new ArrayList<>();
 
-        private int id = 1;
+    private int id = 1;
 
-        public void registerSpout(AdaptorBasicSpout spout, TaggedPValue output) {
-            checkNotNull(spout, "spout");
-            checkNotNull(output, "output");
-            String name = "spout" + genId();
-            this.spoutMap.put(name, spout);
-            registerStreamProducer(
-                    output,
-                    Stream.Producer.of(name, output.getTag().getId(), output.getValue().getName()));
-        }
+    public void registerSpout(AdaptorBasicSpout spout, TaggedPValue output) {
+      checkNotNull(spout, "spout");
+      checkNotNull(output, "output");
+      String name = "spout" + genId();
+      this.spoutMap.put(name, spout);
+      registerStreamProducer(
+          output,
+          Stream.Producer.of(name, output.getTag().getId(), output.getValue().getName()));
+    }
 
-        public AdaptorBasicSpout getSpout(String id) {
-            if (Strings.isNullOrEmpty(id)) {
-                return null;
-            }
-            return this.spoutMap.get(id);
-        }
+    public AdaptorBasicSpout getSpout(String id) {
+      if (Strings.isNullOrEmpty(id)) {
+        return null;
+      }
+      return this.spoutMap.get(id);
+    }
 
-        public Map<String, AdaptorBasicSpout> getSpouts() {
-            return this.spoutMap;
-        }
+    public Map<String, AdaptorBasicSpout> getSpouts() {
+      return this.spoutMap;
+    }
 
-        public String registerBolt(ExecutorsBolt bolt) {
-            checkNotNull(bolt, "bolt");
-            String name = "bolt" + genId();
-            this.boltMap.put(name, bolt);
-            return name;
-        }
+    public String registerBolt(ExecutorsBolt bolt) {
+      checkNotNull(bolt, "bolt");
+      String name = "bolt" + genId();
+      this.boltMap.put(name, bolt);
+      return name;
+    }
 
-        public ExecutorsBolt getBolt(String id) {
-            if (Strings.isNullOrEmpty(id)) {
-                return null;
-            }
-            return this.boltMap.get(id);
-        }
+    public ExecutorsBolt getBolt(String id) {
+      if (Strings.isNullOrEmpty(id)) {
+        return null;
+      }
+      return this.boltMap.get(id);
+    }
 
-        public void registerStreamProducer(TaggedPValue taggedPValue, Stream.Producer producer) {
-            checkNotNull(taggedPValue, "taggedPValue");
-            checkNotNull(producer, "producer");
-            pValueToProducer.put(taggedPValue.getValue(), producer);
-            producerToTaggedPValue.put(producer, taggedPValue);
-        }
+    public void registerStreamProducer(TaggedPValue taggedPValue, Stream.Producer producer) {
+      checkNotNull(taggedPValue, "taggedPValue");
+      checkNotNull(producer, "producer");
+      pValueToProducer.put(taggedPValue.getValue(), producer);
+      producerToTaggedPValue.put(producer, taggedPValue);
+    }
 
-        public Stream.Producer getProducer(PValue pValue) {
-            return pValueToProducer.get(checkNotNull(pValue, "pValue"));
-        }
+    public Stream.Producer getProducer(PValue pValue) {
+      return pValueToProducer.get(checkNotNull(pValue, "pValue"));
+    }
 
-        public String getProducerComponentId(PValue pValue) {
-            Stream.Producer producer = getProducer(pValue);
-            return producer == null ? null : producer.getComponentId();
-        }
+    public String getProducerComponentId(PValue pValue) {
+      Stream.Producer producer = getProducer(pValue);
+      return producer == null ? null : producer.getComponentId();
+    }
 
-        public boolean producedBySpout(PValue pValue) {
-            String componentId = getProducerComponentId(pValue);
-            return getSpout(componentId) != null;
-        }
+    public boolean producedBySpout(PValue pValue) {
+      String componentId = getProducerComponentId(pValue);
+      return getSpout(componentId) != null;
+    }
 
-        public void registerStreamConsumer(Stream.Consumer consumer, Stream.Producer producer) {
-            streams.add(Stream.of(
-                    checkNotNull(producer, "producer"),
-                    checkNotNull(consumer, "consumer")));
-        }
+    public void registerStreamConsumer(Stream.Consumer consumer, Stream.Producer producer) {
+      streams.add(Stream.of(
+          checkNotNull(producer, "producer"),
+          checkNotNull(consumer, "consumer")));
+    }
 
-        public Map<PValue, Stream.Producer> getPValueToProducers() {
-            return pValueToProducer;
-        }
+    public Map<PValue, Stream.Producer> getPValueToProducers() {
+      return pValueToProducer;
+    }
 
-        public Iterable<Stream> getStreams() {
-            return streams;
-        }
+    public Iterable<Stream> getStreams() {
+      return streams;
+    }
 
-        @Override
-        public String toString() {
-            List<String> ret = new ArrayList<>();
-            ret.add("SPOUT");
-            for (Map.Entry<String, AdaptorBasicSpout> entry : spoutMap.entrySet()) {
-                ret.add(entry.getKey() + ": " + entry.getValue().toString());
-            }
-            ret.add("BOLT");
-            for (Map.Entry<String, ExecutorsBolt> entry : boltMap.entrySet()) {
-                ret.add(entry.getKey() + ": " + entry.getValue().toString());
-            }
-            ret.add("STREAM");
-            for (Stream stream : streams) {
-                ret.add(String.format(
-                        "%s@@%s ---> %s@@%s",
-                        stream.getProducer().getStreamId(),
-                        stream.getProducer().getComponentId(),
-                        stream.getConsumer().getGrouping(),
-                        stream.getConsumer().getComponentId()));
-            }
-            return Joiner.on("\n").join(ret);
-        }
+    @Override
+    public String toString() {
+      List<String> ret = new ArrayList<>();
+      ret.add("SPOUT");
+      for (Map.Entry<String, AdaptorBasicSpout> entry : spoutMap.entrySet()) {
+        ret.add(entry.getKey() + ": " + entry.getValue().toString());
+      }
+      ret.add("BOLT");
+      for (Map.Entry<String, ExecutorsBolt> entry : boltMap.entrySet()) {
+        ret.add(entry.getKey() + ": " + entry.getValue().toString());
+      }
+      ret.add("STREAM");
+      for (Stream stream : streams) {
+        ret.add(String.format(
+            "%s@@%s ---> %s@@%s",
+            stream.getProducer().getStreamId(),
+            stream.getProducer().getComponentId(),
+            stream.getConsumer().getGrouping(),
+            stream.getConsumer().getComponentId()));
+      }
+      return Joiner.on("\n").join(ret);
+    }
 
-        private synchronized int genId() {
-            return id++;
-        }
+    private synchronized int genId() {
+      return id++;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
index a33f07b..bce5b3e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
@@ -17,6 +17,8 @@
  */
 package org.apache.beam.runners.jstorm.translation;
 
+import java.util.HashMap;
+import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.translator.BoundedSourceTranslator;
 import org.apache.beam.runners.jstorm.translation.translator.FlattenTranslator;
 import org.apache.beam.runners.jstorm.translation.translator.GroupByKeyTranslator;
@@ -35,50 +37,49 @@ import org.apache.beam.sdk.transforms.windowing.Window;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
-import java.util.Map;
-
 /**
  * Lookup table mapping PTransform types to associated TransformTranslator implementations.
  */
 public class TranslatorRegistry {
-    private static final Logger LOG = LoggerFactory.getLogger(TranslatorRegistry.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TranslatorRegistry.class);
 
-    private static final Map<Class<? extends PTransform>, TransformTranslator> TRANSLATORS = new HashMap<>();
+  private static final Map<Class<? extends PTransform>, TransformTranslator> TRANSLATORS =
+      new HashMap<>();
 
-    static {
-        TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
-        TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
-        // TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator());
-        // TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
+  static {
+    TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
+    TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
+    // TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator());
+    // TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
 
-        TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
-        TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
+    TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
+    TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
 
-        //TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator<>());
-        TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
+    //TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator<>());
+    TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
 
-        TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());
+    TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());
 
-        TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator());
+    TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator());
 
-        TRANSLATORS.put(ViewTranslator.CreateJStormPCollectionView.class, new ViewTranslator());
+    TRANSLATORS.put(ViewTranslator.CreateJStormPCollectionView.class, new ViewTranslator());
 
-        /**
-         * Currently, empty translation is required for combine and reshuffle. Because, the transforms will be 
-         * mapped to GroupByKey and Pardo finally. So we only need to translator the finally transforms.
-         * If any improvement is required, the composite transforms will be translated in the future.
-         */
-        // TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator());
-        // TRANSLATORS.put(Globally.class, new CombineGloballyTranslator());
-        // TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslator());
-    }
+    /**
+     * Currently, an empty translation is required for combine and reshuffle,
+     * because those transforms are ultimately mapped to GroupByKey and ParDo.
+     * So we only need to translate the final transforms.
+     * If any improvement is required, the composite transforms will be translated in the future.
+     */
+    // TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator());
+    // TRANSLATORS.put(Globally.class, new CombineGloballyTranslator());
+    // TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslator());
+  }
 
-    public static TransformTranslator<?> getTranslator(PTransform<?, ?> transform) {
-        TransformTranslator<?> translator = TRANSLATORS.get(transform.getClass());
-        if (translator == null) {
-            LOG.warn("Unsupported operator={}", transform.getClass().getName());
-        }
-        return translator;
+  public static TransformTranslator<?> getTranslator(PTransform<?, ?> transform) {
+    TransformTranslator<?> translator = TRANSLATORS.get(transform.getClass());
+    if (translator == null) {
+      LOG.warn("Unsupported operator={}", transform.getClass().getName());
     }
+    return translator;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
index b07b426..68e9e17 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
@@ -17,54 +17,52 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-
 import backtype.storm.topology.IComponent;
 import backtype.storm.topology.OutputFieldsDeclarer;
 import backtype.storm.tuple.Fields;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
 
 /*
  * Enable user to add output stream definitions by API, rather than hard-code.
  */
 public abstract class AbstractComponent implements IComponent {
-    private Map<String, Fields> streamToFields = new HashMap<>();
-    private Map<String, Boolean> keyStreams = new HashMap<>();
-    private int parallelismNum = 0;
+  private Map<String, Fields> streamToFields = new HashMap<>();
+  private Map<String, Boolean> keyStreams = new HashMap<>();
+  private int parallelismNum = 0;
 
-    public void addOutputField(String streamId) {
-        addOutputField(streamId, new Fields(CommonInstance.VALUE));
-    }
+  public void addOutputField(String streamId) {
+    addOutputField(streamId, new Fields(CommonInstance.VALUE));
+  }
 
-    public void addOutputField(String streamId, Fields fields) {
-        streamToFields.put(streamId, fields);
-        keyStreams.put(streamId, false);
-    }
+  public void addOutputField(String streamId, Fields fields) {
+    streamToFields.put(streamId, fields);
+    keyStreams.put(streamId, false);
+  }
 
-    public void addKVOutputField(String streamId) {
-        streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
-        keyStreams.put(streamId, true);
-    }
+  public void addKVOutputField(String streamId) {
+    streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
+    keyStreams.put(streamId, true);
+  }
 
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
-            declarer.declareStream(entry.getKey(), entry.getValue());
-        }
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
+      declarer.declareStream(entry.getKey(), entry.getValue());
     }
+  }
 
-    public boolean keyedEmit(String streamId) {
-        Boolean isKeyedStream = keyStreams.get(streamId);
-        return isKeyedStream == null ? false : isKeyedStream;
-    }
+  public boolean keyedEmit(String streamId) {
+    Boolean isKeyedStream = keyStreams.get(streamId);
+    return isKeyedStream == null ? false : isKeyedStream;
+  }
 
-    public int getParallelismNum() {
-        return parallelismNum;
-    }
+  public int getParallelismNum() {
+    return parallelismNum;
+  }
 
-    public void setParallelismNum(int num) {
-        parallelismNum = num;
-    }
+  public void setParallelismNum(int num) {
+    parallelismNum = num;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
index 91881f2..5e9b056 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
@@ -20,5 +20,5 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 import backtype.storm.topology.IRichBatchBolt;
 
 public abstract class AdaptorBasicBolt extends AbstractComponent implements IRichBatchBolt {
-    
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
index 5a0c6ec..0480518 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
@@ -20,5 +20,5 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 import backtype.storm.topology.IRichSpout;
 
 public abstract class AdaptorBasicSpout extends AbstractComponent implements IRichSpout {
-    
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
index c73a3b8..9507948 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
@@ -17,312 +17,319 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
-import java.io.Serializable;
-import java.util.*;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 import avro.shaded.com.google.common.collect.Iterables;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.metric.MetricClient;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
 import org.apache.beam.runners.core.DoFnRunner;
 import org.apache.beam.runners.core.DoFnRunners;
 import org.apache.beam.runners.core.DoFnRunners.OutputManager;
+import org.apache.beam.runners.core.NullSideInputReader;
 import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
 import org.apache.beam.runners.core.SideInputHandler;
 import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner;
 import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateNamespaces;
 import org.apache.beam.runners.core.StateTag;
 import org.apache.beam.runners.core.StateTags;
 import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+import org.apache.beam.runners.jstorm.translation.util.DefaultStepContext;
+import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
 import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
 import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.runners.core.NullSideInputReader;
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
 import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.beam.runners.jstorm.translation.util.DefaultStepContext;
-import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
 public class DoFnExecutor<InputT, OutputT> implements Executor {
-    private static final long serialVersionUID = 5297603063991078668L;
+  private static final long serialVersionUID = 5297603063991078668L;
 
-    private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
 
-    public class DoFnExecutorOutputManager implements OutputManager, Serializable {
-        private static final long serialVersionUID = -661113364735206170L;
+  public class DoFnExecutorOutputManager implements OutputManager, Serializable {
+    private static final long serialVersionUID = -661113364735206170L;
 
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            executorsBolt.processExecutorElem(tag, output);
-        }
-    }
-
-    protected transient DoFnRunner<InputT, OutputT> runner = null;
-    protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
-
-    protected final String stepName;
-
-    protected int internalDoFnExecutorId;
-
-    protected final String description;
-
-    protected final TupleTag<OutputT> mainTupleTag;
-    protected final List<TupleTag<?>> sideOutputTags;
-
-    protected SerializedPipelineOptions serializedOptions;
-    protected transient JStormPipelineOptions pipelineOptions;
-
-    protected DoFn<InputT, OutputT> doFn;
-    protected final Coder<WindowedValue<InputT>> inputCoder;
-    protected DoFnInvoker<InputT, OutputT> doFnInvoker;
-    protected OutputManager outputManager;
-    protected WindowingStrategy<?, ?> windowingStrategy;
-    protected final TupleTag<InputT> mainInputTag;
-    protected Collection<PCollectionView<?>> sideInputs;
-    protected SideInputHandler sideInputHandler;
-    protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
-
-    // Initialize during runtime
-    protected ExecutorContext executorContext;
-    protected ExecutorsBolt executorsBolt;
-    protected TimerInternals timerInternals;
-    protected transient StateInternals pushbackStateInternals;
-    protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
-    protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
-    protected transient IKvStoreManager kvStoreManager;
-    protected DefaultStepContext stepContext;
-    protected transient MetricClient metricClient;
-
-    public DoFnExecutor(
-            String stepName,
-            String description,
-            JStormPipelineOptions pipelineOptions,
-            DoFn<InputT, OutputT> doFn,
-            Coder<WindowedValue<InputT>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<InputT> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs,
-            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-            TupleTag<OutputT> mainTupleTag,
-            List<TupleTag<?>> sideOutputTags) {
-        this.stepName = checkNotNull(stepName, "stepName");
-        this.description = checkNotNull(description, "description");
-        this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
-        this.doFn = doFn;
-        this.inputCoder = inputCoder;
-        this.outputManager = new DoFnExecutorOutputManager();
-        this.windowingStrategy = windowingStrategy;
-        this.mainInputTag = mainInputTag;
-        this.sideInputs = sideInputs;
-        this.mainTupleTag = mainTupleTag;
-        this.sideOutputTags = sideOutputTags;
-        this.sideInputTagToView = sideInputTagToView;
+    @Override
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      executorsBolt.processExecutorElem(tag, output);
     }
-
-    protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
-        return new DoFnRunnerWithMetrics<>(
-            stepName,
-            DoFnRunners.simpleRunner(
-                this.pipelineOptions,
-                this.doFn,
-                this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
-                this.outputManager,
-                this.mainTupleTag,
-                this.sideOutputTags,
-                this.stepContext,
-                this.windowingStrategy),
-            MetricsReporter.create(metricClient));
+  }
+
+  protected transient DoFnRunner<InputT, OutputT> runner = null;
+  protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
+
+  protected final String stepName;
+
+  protected int internalDoFnExecutorId;
+
+  protected final String description;
+
+  protected final TupleTag<OutputT> mainTupleTag;
+  protected final List<TupleTag<?>> sideOutputTags;
+
+  protected SerializedPipelineOptions serializedOptions;
+  protected transient JStormPipelineOptions pipelineOptions;
+
+  protected DoFn<InputT, OutputT> doFn;
+  protected final Coder<WindowedValue<InputT>> inputCoder;
+  protected DoFnInvoker<InputT, OutputT> doFnInvoker;
+  protected OutputManager outputManager;
+  protected WindowingStrategy<?, ?> windowingStrategy;
+  protected final TupleTag<InputT> mainInputTag;
+  protected Collection<PCollectionView<?>> sideInputs;
+  protected SideInputHandler sideInputHandler;
+  protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
+
+  // Initialize during runtime
+  protected ExecutorContext executorContext;
+  protected ExecutorsBolt executorsBolt;
+  protected TimerInternals timerInternals;
+  protected transient StateInternals pushbackStateInternals;
+  protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
+  protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
+  protected transient IKvStoreManager kvStoreManager;
+  protected DefaultStepContext stepContext;
+  protected transient MetricClient metricClient;
+
+  public DoFnExecutor(
+      String stepName,
+      String description,
+      JStormPipelineOptions pipelineOptions,
+      DoFn<InputT, OutputT> doFn,
+      Coder<WindowedValue<InputT>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<InputT> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+      TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags) {
+    this.stepName = checkNotNull(stepName, "stepName");
+    this.description = checkNotNull(description, "description");
+    this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
+    this.doFn = doFn;
+    this.inputCoder = inputCoder;
+    this.outputManager = new DoFnExecutorOutputManager();
+    this.windowingStrategy = windowingStrategy;
+    this.mainInputTag = mainInputTag;
+    this.sideInputs = sideInputs;
+    this.mainTupleTag = mainTupleTag;
+    this.sideOutputTags = sideOutputTags;
+    this.sideInputTagToView = sideInputTagToView;
+  }
+
+  protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
+    return new DoFnRunnerWithMetrics<>(
+        stepName,
+        DoFnRunners.simpleRunner(
+            this.pipelineOptions,
+            this.doFn,
+            this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
+            this.outputManager,
+            this.mainTupleTag,
+            this.sideOutputTags,
+            this.stepContext,
+            this.windowingStrategy),
+        MetricsReporter.create(metricClient));
+  }
+
+  protected void initService(ExecutorContext context) {
+    // TODO: what should be set for key in here?
+    timerInternals = new JStormTimerInternals(
+        null /* key */, this, context.getExecutorsBolt().timerService());
+    kvStoreManager = context.getKvStoreManager();
+    stepContext = new DefaultStepContext(timerInternals,
+        new JStormStateInternals(
+            null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    metricClient = new MetricClient(executorContext.getTopologyContext());
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorContext = context;
+    this.executorsBolt = context.getExecutorsBolt();
+    this.pipelineOptions =
+        this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
+
+    initService(context);
+
+    // Side inputs setup
+    if (sideInputs != null && sideInputs.isEmpty() == false) {
+      pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
+      watermarkHoldTag =
+          StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
+      pushbackStateInternals = new JStormStateInternals(
+          null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+      sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
+      runner = getDoFnRunner();
+      pushbackRunner =
+          SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
+    } else {
+      runner = getDoFnRunner();
     }
 
-    protected void initService(ExecutorContext context) {
-        // TODO: what should be set for key in here?
-        timerInternals = new JStormTimerInternals(null /* key */, this, context.getExecutorsBolt().timerService());
-        kvStoreManager = context.getKvStoreManager();
-        stepContext = new DefaultStepContext(timerInternals,
-                new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        metricClient = new MetricClient(executorContext.getTopologyContext());
+    // Process user's setup
+    doFnInvoker = DoFnInvokers.invokerFor(doFn);
+    doFnInvoker.invokeSetup();
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
+        tag, mainInputTag, sideInputs, elem.getValue()));
+    if (mainInputTag.equals(tag)) {
+      processMainInput(elem);
+    } else {
+      processSideInput(tag, elem);
     }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.executorContext = context;
-        this.executorsBolt = context.getExecutorsBolt();
-        this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
-
-        initService(context);
-
-        // Side inputs setup
-        if (sideInputs != null && sideInputs.isEmpty() == false) {
-            pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
-            watermarkHoldTag =
-                    StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
-            pushbackStateInternals = new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-            sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
-            runner = getDoFnRunner();
-            pushbackRunner = SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
-        } else {
-            runner = getDoFnRunner();
+  }
+
+  protected <T> void processMainInput(WindowedValue<T> elem) {
+    if (sideInputs.isEmpty()) {
+      runner.processElement((WindowedValue<InputT>) elem);
+    } else {
+      Iterable<WindowedValue<InputT>> justPushedBack =
+          pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
+      BagState<WindowedValue<InputT>> pushedBack =
+          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+
+      Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+      for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
+        if (pushedBackValue.getTimestamp().isBefore(min)) {
+          min = pushedBackValue.getTimestamp();
         }
-
-        // Process user's setup
-        doFnInvoker = DoFnInvokers.invokerFor(doFn);
-        doFnInvoker.invokeSetup();
+        min = earlier(min, pushedBackValue.getTimestamp());
+        pushedBack.add(pushedBackValue);
+      }
+      pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
     }
+  }
 
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
-                tag, mainInputTag, sideInputs, elem.getValue()));
-        if (mainInputTag.equals(tag)) {
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
-    }
-
-    protected <T> void processMainInput(WindowedValue<T> elem) {
-       if (sideInputs.isEmpty()) {
-           runner.processElement((WindowedValue<InputT>) elem);
-       } else {
-           Iterable<WindowedValue<InputT>> justPushedBack =
-               pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
-           BagState<WindowedValue<InputT>> pushedBack =
-                   pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-
-           Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-           for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
-               if (pushedBackValue.getTimestamp().isBefore(min)) {
-                   min = pushedBackValue.getTimestamp();
-               }
-               min = earlier(min, pushedBackValue.getTimestamp());
-               pushedBack.add(pushedBackValue);
-           }
-           pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
-       }
-    }
-
-    protected void processSideInput(TupleTag tag, WindowedValue elem) {
-        LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
-
-        PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
-        sideInputHandler.addSideInputValue(sideInputView, elem);
+  protected void processSideInput(TupleTag tag, WindowedValue elem) {
+    LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
 
-        BagState<WindowedValue<InputT>> pushedBack =
-                pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+    PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
+    sideInputHandler.addSideInputValue(sideInputView, elem);
 
-        List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
+    BagState<WindowedValue<InputT>> pushedBack =
+        pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
 
-        Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
-        if (pushedBackInputs != null) {
-            for (WindowedValue<InputT> input : pushedBackInputs) {
+    List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
 
-                Iterable<WindowedValue<InputT>> justPushedBack =
-                        pushbackRunner.processElementInReadyWindows(input);
-                Iterables.addAll(newPushedBack, justPushedBack);
-            }
-        }
-        pushedBack.clear();
-
-        Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-        for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
-            min = earlier(min, pushedBackValue.getTimestamp());
-            pushedBack.add(pushedBackValue);
-        }
+    Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
+    if (pushedBackInputs != null) {
+      for (WindowedValue<InputT> input : pushedBackInputs) {
 
-        WatermarkHoldState watermarkHold =
-                pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-        // TODO: clear-then-add is not thread-safe.
-        watermarkHold.clear();
-        watermarkHold.add(min);
+        Iterable<WindowedValue<InputT>> justPushedBack =
+            pushbackRunner.processElementInReadyWindows(input);
+        Iterables.addAll(newPushedBack, justPushedBack);
+      }
     }
+    pushedBack.clear();
 
-    /**
-     * Process all pushed back elements when receiving watermark with max timestamp
-     */
-    public void processAllPushBackElements() {
-        if (sideInputs != null && sideInputs.isEmpty() == false) {
-            BagState<WindowedValue<InputT>> pushedBackElements =
-                    pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-            if (pushedBackElements != null) {
-                for (WindowedValue<InputT> elem : pushedBackElements.read()) {
-                    LOG.info("Process pushback elem={}", elem);
-                    runner.processElement(elem);
-                }
-                pushedBackElements.clear();
-            }
-
-            WatermarkHoldState watermarkHold =
-                    pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-            watermarkHold.clear();
-            watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
-        }
+    Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+    for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
+      min = earlier(min, pushedBackValue.getTimestamp());
+      pushedBack.add(pushedBackValue);
     }
 
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        StateNamespace namespace = timerData.getNamespace();
-        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
-        BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
-        if (pushbackRunner != null) {
-            pushbackRunner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
-        } else {
-            runner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+    WatermarkHoldState watermarkHold =
+        pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+    // TODO: clear-then-add is not thread-safe.
+    watermarkHold.clear();
+    watermarkHold.add(min);
+  }
+
+  /**
+   * Process all pushed back elements when receiving watermark with max timestamp
+   */
+  public void processAllPushBackElements() {
+    if (sideInputs != null && sideInputs.isEmpty() == false) {
+      BagState<WindowedValue<InputT>> pushedBackElements =
+          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+      if (pushedBackElements != null) {
+        for (WindowedValue<InputT> elem : pushedBackElements.read()) {
+          LOG.info("Process pushback elem={}", elem);
+          runner.processElement(elem);
         }
-    }
+        pushedBackElements.clear();
+      }
 
-    @Override
-    public void cleanup() {
-        doFnInvoker.invokeTeardown();
+      WatermarkHoldState watermarkHold =
+          pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+      watermarkHold.clear();
+      watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
     }
-
-    @Override
-    public String toString() {
-        return description;
+  }
+
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    StateNamespace namespace = timerData.getNamespace();
+    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+    BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
+    if (pushbackRunner != null) {
+      pushbackRunner.onTimer(
+          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+    } else {
+      runner.onTimer(
+          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
     }
-
-    private Instant earlier(Instant left, Instant right) {
-        return left.isBefore(right) ? left : right;
+  }
+
+  @Override
+  public void cleanup() {
+    doFnInvoker.invokeTeardown();
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
+
+  private Instant earlier(Instant left, Instant right) {
+    return left.isBefore(right) ? left : right;
+  }
+
+  public void startBundle() {
+    if (pushbackRunner != null) {
+      pushbackRunner.startBundle();
+    } else {
+      runner.startBundle();
     }
+  }
 
-    public void startBundle() {
-        if (pushbackRunner != null) {
-            pushbackRunner.startBundle();
-        } else {
-            runner.startBundle();
-        }
+  public void finishBundle() {
+    if (pushbackRunner != null) {
+      pushbackRunner.finishBundle();
+    } else {
+      runner.finishBundle();
     }
+  }
 
-    public void finishBundle() {
-        if (pushbackRunner != null) {
-            pushbackRunner.finishBundle();
-        } else {
-            runner.finishBundle();
-        }
-    }
+  public void setInternalDoFnExecutorId(int id) {
+    this.internalDoFnExecutorId = id;
+  }
 
-    public void setInternalDoFnExecutorId(int id) {
-        this.internalDoFnExecutorId = id;
-    }
-
-    public int getInternalDoFnExecutorId() {
-        return internalDoFnExecutorId;
-    }
+  public int getInternalDoFnExecutorId() {
+    return internalDoFnExecutorId;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
index 98dbcc5..1610a8a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
@@ -68,7 +68,8 @@ public class DoFnRunnerWithMetrics<InputT, OutputT> implements DoFnRunner<InputT
   }
 
   @Override
-  public void onTimer(String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
+  public void onTimer(
+      String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
     try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
         metricsReporter.getMetricsContainer(stepName))) {
       delegate.onTimer(timerId, window, timestamp, timeDomain);

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
index d7214db..1a03cb8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
@@ -18,17 +18,16 @@
 package org.apache.beam.runners.jstorm.translation.runtime;
 
 import java.io.Serializable;
-
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
 public interface Executor extends Serializable {
-    /**
-     * Initialization during runtime
-     */
-    void init(ExecutorContext context);
+  /**
+   * Initialization during runtime
+   */
+  void init(ExecutorContext context);
 
-    <T> void  process(TupleTag<T> tag, WindowedValue<T> elem);
+  <T> void process(TupleTag<T> tag, WindowedValue<T> elem);
 
-    void cleanup();
+  void cleanup();
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
index 1de881f..1f65921 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
@@ -23,13 +23,16 @@ import com.google.auto.value.AutoValue;
 
 @AutoValue
 public abstract class ExecutorContext {
-    public static ExecutorContext of(TopologyContext topologyContext, ExecutorsBolt bolt, IKvStoreManager kvStoreManager) {
-        return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
-    }
+  public static ExecutorContext of(
+      TopologyContext topologyContext,
+      ExecutorsBolt bolt,
+      IKvStoreManager kvStoreManager) {
+    return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
+  }
 
-    public abstract TopologyContext getTopologyContext();
+  public abstract TopologyContext getTopologyContext();
 
-    public abstract ExecutorsBolt getExecutorsBolt();
+  public abstract ExecutorsBolt getExecutorsBolt();
 
-    public abstract IKvStoreManager getKvStoreManager();
+  public abstract IKvStoreManager getKvStoreManager();
 }


[25/53] [abbrv] beam git commit: jstorm-runner: move most classes to translation package and reduce their visibility to package private.

Posted by pe...@apache.org.
jstorm-runner: move most classes to translation package and reduce their visibility to package private.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/82653534
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/82653534
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/82653534

Branch: refs/heads/jstorm-runner
Commit: 82653534b0b738ee84ed94a67f9344393778d033
Parents: 9309ac4
Author: Pei He <pe...@apache.org>
Authored: Fri Jul 14 15:28:53 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       |  14 +-
 .../jstorm/serialization/package-info.java      |  22 ++
 .../jstorm/translation/AbstractComponent.java   |  67 ++++
 .../translation/BoundedSourceTranslator.java    |  49 +++
 .../jstorm/translation/CommonInstance.java      |  28 ++
 .../jstorm/translation/DefaultStepContext.java  |  90 +++++
 .../jstorm/translation/DoFnExecutor.java        | 339 +++++++++++++++++
 .../translation/DoFnRunnerWithMetrics.java      |  91 +++++
 .../runners/jstorm/translation/Executor.java    |  36 ++
 .../jstorm/translation/ExecutorContext.java     |  41 ++
 .../jstorm/translation/ExecutorsBolt.java       | 338 +++++++++++++++++
 .../jstorm/translation/FlattenExecutor.java     |  60 +++
 .../jstorm/translation/FlattenTranslator.java   |  49 +++
 .../translation/GroupByKeyTranslator.java       |  71 ++++
 .../translation/GroupByWindowExecutor.java      | 173 +++++++++
 .../jstorm/translation/JStormBagState.java      | 180 +++++++++
 .../translation/JStormCombiningState.java       |  88 +++++
 .../jstorm/translation/JStormMapState.java      | 158 ++++++++
 .../translation/JStormPipelineTranslator.java   |   2 -
 .../translation/JStormStateInternals.java       | 190 ++++++++++
 .../translation/JStormTimerInternals.java       |  97 +++++
 .../jstorm/translation/JStormValueState.java    |  82 ++++
 .../translation/JStormWatermarkHoldState.java   |  82 ++++
 .../jstorm/translation/MetricsReporter.java     |  87 +++++
 .../translation/MultiOutputDoFnExecutor.java    |  79 ++++
 .../translation/MultiStatefulDoFnExecutor.java  |  70 ++++
 .../translation/ParDoBoundMultiTranslator.java  | 114 ++++++
 .../translation/ParDoBoundTranslator.java       | 107 ++++++
 .../runners/jstorm/translation/RunnerUtils.java |  51 +++
 .../translation/SerializedPipelineOptions.java  |  65 ++++
 .../translation/SingletonKeyedWorkItem.java     |  62 +++
 .../translation/StatefulDoFnExecutor.java       |  68 ++++
 .../beam/runners/jstorm/translation/Stream.java | 104 +++++
 .../jstorm/translation/TimerService.java        |  51 +++
 .../jstorm/translation/TimerServiceImpl.java    | 155 ++++++++
 .../jstorm/translation/TransformTranslator.java |  79 ++++
 .../jstorm/translation/TranslationContext.java  |   6 -
 .../jstorm/translation/TranslatorRegistry.java  |  11 +-
 .../jstorm/translation/TxExecutorsBolt.java     | 133 +++++++
 .../translation/TxUnboundedSourceSpout.java     | 156 ++++++++
 .../translation/UnboundedSourceSpout.java       | 189 +++++++++
 .../translation/UnboundedSourceTranslator.java  |  44 +++
 .../jstorm/translation/ViewExecutor.java        |  56 +++
 .../jstorm/translation/ViewTranslator.java      | 378 ++++++++++++++++++
 .../translation/WindowAssignExecutor.java       | 112 ++++++
 .../translation/WindowAssignTranslator.java     |  41 ++
 .../jstorm/translation/package-info.java        |  22 ++
 .../translation/runtime/AbstractComponent.java  |  68 ----
 .../translation/runtime/DoFnExecutor.java       | 343 -----------------
 .../runtime/DoFnRunnerWithMetrics.java          |  91 -----
 .../jstorm/translation/runtime/Executor.java    |  36 --
 .../translation/runtime/ExecutorContext.java    |  41 --
 .../translation/runtime/ExecutorsBolt.java      | 339 -----------------
 .../translation/runtime/FlattenExecutor.java    |  60 ---
 .../runtime/GroupByWindowExecutor.java          | 177 ---------
 .../translation/runtime/MetricsReporter.java    |  87 -----
 .../runtime/MultiOutputDoFnExecutor.java        |  79 ----
 .../runtime/MultiStatefulDoFnExecutor.java      |  72 ----
 .../runtime/StatefulDoFnExecutor.java           |  70 ----
 .../translation/runtime/TimerService.java       |  51 ---
 .../translation/runtime/TimerServiceImpl.java   | 155 --------
 .../translation/runtime/TxExecutorsBolt.java    | 133 -------
 .../runtime/TxUnboundedSourceSpout.java         | 156 --------
 .../runtime/UnboundedSourceSpout.java           | 191 ----------
 .../translation/runtime/ViewExecutor.java       |  56 ---
 .../runtime/WindowAssignExecutor.java           | 112 ------
 .../runtime/state/JStormBagState.java           | 180 ---------
 .../runtime/state/JStormCombiningState.java     |  88 -----
 .../runtime/state/JStormMapState.java           | 158 --------
 .../runtime/state/JStormStateInternals.java     | 191 ----------
 .../runtime/state/JStormValueState.java         |  82 ----
 .../runtime/state/JStormWatermarkHoldState.java |  83 ----
 .../runtime/timer/JStormTimerInternals.java     | 100 -----
 .../translator/BoundedSourceTranslator.java     |  51 ---
 .../translator/FlattenTranslator.java           |  51 ---
 .../translator/GroupByKeyTranslator.java        |  73 ----
 .../translator/ParDoBoundMultiTranslator.java   | 118 ------
 .../translator/ParDoBoundTranslator.java        | 110 ------
 .../jstorm/translation/translator/Stream.java   | 104 -----
 .../translator/TransformTranslator.java         |  80 ----
 .../translator/UnboundedSourceTranslator.java   |  46 ---
 .../translation/translator/ViewTranslator.java  | 380 -------------------
 .../translator/WindowAssignTranslator.java      |  43 ---
 .../jstorm/translation/util/CommonInstance.java |  28 --
 .../translation/util/DefaultStepContext.java    |  90 -----
 .../beam/runners/jstorm/util/RunnerUtils.java   |  55 ---
 .../jstorm/util/SerializedPipelineOptions.java  |  65 ----
 .../jstorm/util/SingletonKeyedWorkItem.java     |  62 ---
 .../translation/JStormStateInternalsTest.java   | 221 +++++++++++
 .../runtime/state/JStormStateInternalsTest.java | 222 -----------
 90 files changed, 4783 insertions(+), 4802 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 8782130..baf4e5a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -38,15 +38,15 @@ import org.apache.beam.runners.jstorm.serialization.KvStoreIterableSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuListSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuSetSerializer;
 import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSerializer;
+import org.apache.beam.runners.jstorm.translation.AbstractComponent;
+import org.apache.beam.runners.jstorm.translation.CommonInstance;
+import org.apache.beam.runners.jstorm.translation.ExecutorsBolt;
 import org.apache.beam.runners.jstorm.translation.JStormPipelineTranslator;
+import org.apache.beam.runners.jstorm.translation.Stream;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.AbstractComponent;
-import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
-import org.apache.beam.runners.jstorm.translation.runtime.TxExecutorsBolt;
-import org.apache.beam.runners.jstorm.translation.runtime.TxUnboundedSourceSpout;
-import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-import org.apache.beam.runners.jstorm.translation.translator.Stream;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import org.apache.beam.runners.jstorm.translation.TxExecutorsBolt;
+import org.apache.beam.runners.jstorm.translation.TxUnboundedSourceSpout;
+import org.apache.beam.runners.jstorm.translation.UnboundedSourceSpout;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineRunner;
 import org.apache.beam.sdk.options.PipelineOptions;

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/package-info.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/package-info.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/package-info.java
new file mode 100644
index 0000000..f5ac931
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Implementation of kryo serializers.
+ */
+package org.apache.beam.runners.jstorm.serialization;

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/AbstractComponent.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/AbstractComponent.java
new file mode 100644
index 0000000..35ae88d
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/AbstractComponent.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import backtype.storm.topology.IComponent;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Enable user to add output stream definitions by API, rather than hard-code.
+ */
+public abstract class AbstractComponent implements IComponent {
+  private Map<String, Fields> streamToFields = new HashMap<>();
+  private Map<String, Boolean> keyStreams = new HashMap<>();
+  private int parallelismNum = 0;
+
+  public void addOutputField(String streamId) {
+    addOutputField(streamId, new Fields(CommonInstance.VALUE));
+  }
+
+  public void addOutputField(String streamId, Fields fields) {
+    streamToFields.put(streamId, fields);
+    keyStreams.put(streamId, false);
+  }
+
+  public void addKVOutputField(String streamId) {
+    streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
+    keyStreams.put(streamId, true);
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
+      declarer.declareStream(entry.getKey(), entry.getValue());
+    }
+  }
+
+  public boolean keyedEmit(String streamId) {
+    Boolean isKeyedStream = keyStreams.get(streamId);
+    return isKeyedStream == null ? false : isKeyedStream;
+  }
+
+  public int getParallelismNum() {
+    return parallelismNum;
+  }
+
+  public void setParallelismNum(int num) {
+    parallelismNum = num;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
new file mode 100644
index 0000000..f64193e
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
+import org.apache.beam.sdk.io.Read;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TaggedPValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Translates a {@link Read.Bounded} into a Storm spout.
+ *
+ * @param <T>
+ */
+class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
+
+  @Override
+  public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+
+    TupleTag<?> outputTag = userGraphContext.getOutputTag();
+    PValue outputValue = userGraphContext.getOutput();
+    UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        description,
+        new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
+        userGraphContext.getOptions(), outputTag);
+
+    context.getExecutionGraphContext().registerSpout(
+        spout, TaggedPValue.of(outputTag, outputValue));
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/CommonInstance.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/CommonInstance.java
new file mode 100644
index 0000000..b7154cd
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/CommonInstance.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+/**
+ * Common definition of JStorm runner.
+ */
+public class CommonInstance {
+  public static final String KEY = "Key";
+  public static final String VALUE = "Value";
+
+  public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DefaultStepContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DefaultStepContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DefaultStepContext.java
new file mode 100644
index 0000000..9fd584b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DefaultStepContext.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.IOException;
+import org.apache.beam.runners.core.ExecutionContext;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Default StepContext for running DoFn This does not allow accessing state or timer internals.
+ */
+class DefaultStepContext implements ExecutionContext.StepContext {
+
+  private TimerInternals timerInternals;
+
+  private StateInternals stateInternals;
+
+  public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
+    this.timerInternals = checkNotNull(timerInternals, "timerInternals");
+    this.stateInternals = checkNotNull(stateInternals, "stateInternals");
+  }
+
+  @Override
+  public String getStepName() {
+    return null;
+  }
+
+  @Override
+  public String getTransformName() {
+    return null;
+  }
+
+  @Override
+  public void noteOutput(WindowedValue<?> windowedValue) {
+
+  }
+
+  @Override
+  public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
+
+  }
+
+  @Override
+  public <T, W extends BoundedWindow> void writePCollectionViewData(
+      TupleTag<?> tag, Iterable<WindowedValue<T>> data,
+      Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder)
+      throws IOException {
+    throw new UnsupportedOperationException("Writing side-input data is not supported.");
+  }
+
+  @Override
+  public StateInternals stateInternals() {
+    return stateInternals;
+  }
+
+  @Override
+  public TimerInternals timerInternals() {
+    return timerInternals;
+  }
+
+  public void setStateInternals(StateInternals stateInternals) {
+    this.stateInternals = stateInternals;
+  }
+
+  public void setTimerInternals(TimerInternals timerInternals) {
+    this.timerInternals = timerInternals;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
new file mode 100644
index 0000000..fdd9af6
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
@@ -0,0 +1,339 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.metric.MetricClient;
+import com.google.common.collect.Iterables;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.runners.core.DoFnRunners;
+import org.apache.beam.runners.core.DoFnRunners.OutputManager;
+import org.apache.beam.runners.core.NullSideInputReader;
+import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
+import org.apache.beam.runners.core.SideInputHandler;
+import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.core.StateTag;
+import org.apache.beam.runners.core.StateTags;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
+import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JStorm {@link Executor} for {@link DoFn}.
+ * @param <InputT> input type
+ * @param <OutputT> output type
+ */
+class DoFnExecutor<InputT, OutputT> implements Executor {
+  private static final long serialVersionUID = 5297603063991078668L;
+
+  private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
+
+  /**
+   * Implements {@link OutputManager} in a DoFn executor.
+   */
+  public class DoFnExecutorOutputManager implements OutputManager, Serializable {
+    private static final long serialVersionUID = -661113364735206170L;
+
+    @Override
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      executorsBolt.processExecutorElem(tag, output);
+    }
+  }
+
+  protected transient DoFnRunner<InputT, OutputT> runner = null;
+  protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
+
+  protected final String stepName;
+
+  protected int internalDoFnExecutorId;
+
+  protected final String description;
+
+  protected final TupleTag<OutputT> mainTupleTag;
+  protected final List<TupleTag<?>> sideOutputTags;
+
+  protected SerializedPipelineOptions serializedOptions;
+  protected transient JStormPipelineOptions pipelineOptions;
+
+  protected DoFn<InputT, OutputT> doFn;
+  protected final Coder<WindowedValue<InputT>> inputCoder;
+  protected DoFnInvoker<InputT, OutputT> doFnInvoker;
+  protected OutputManager outputManager;
+  protected WindowingStrategy<?, ?> windowingStrategy;
+  protected final TupleTag<InputT> mainInputTag;
+  protected Collection<PCollectionView<?>> sideInputs;
+  protected SideInputHandler sideInputHandler;
+  protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
+
+  // Initialize during runtime
+  protected ExecutorContext executorContext;
+  protected ExecutorsBolt executorsBolt;
+  protected TimerInternals timerInternals;
+  protected transient StateInternals pushbackStateInternals;
+  protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
+  protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
+  protected transient IKvStoreManager kvStoreManager;
+  protected DefaultStepContext stepContext;
+  protected transient MetricClient metricClient;
+
+  public DoFnExecutor(
+      String stepName,
+      String description,
+      JStormPipelineOptions pipelineOptions,
+      DoFn<InputT, OutputT> doFn,
+      Coder<WindowedValue<InputT>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<InputT> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+      TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags) {
+    this.stepName = checkNotNull(stepName, "stepName");
+    this.description = checkNotNull(description, "description");
+    this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
+    this.doFn = doFn;
+    this.inputCoder = inputCoder;
+    this.outputManager = new DoFnExecutorOutputManager();
+    this.windowingStrategy = windowingStrategy;
+    this.mainInputTag = mainInputTag;
+    this.sideInputs = sideInputs;
+    this.mainTupleTag = mainTupleTag;
+    this.sideOutputTags = sideOutputTags;
+    this.sideInputTagToView = sideInputTagToView;
+  }
+
+  protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
+    return new DoFnRunnerWithMetrics<>(
+        stepName,
+        DoFnRunners.simpleRunner(
+            this.pipelineOptions,
+            this.doFn,
+            this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
+            this.outputManager,
+            this.mainTupleTag,
+            this.sideOutputTags,
+            this.stepContext,
+            this.windowingStrategy),
+        MetricsReporter.create(metricClient));
+  }
+
+  protected void initService(ExecutorContext context) {
+    // TODO: what should be set for key in here?
+    timerInternals = new JStormTimerInternals(
+        null /* key */, this, context.getExecutorsBolt().timerService());
+    kvStoreManager = context.getKvStoreManager();
+    stepContext = new DefaultStepContext(timerInternals,
+        new JStormStateInternals(
+            null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    metricClient = new MetricClient(executorContext.getTopologyContext());
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorContext = context;
+    this.executorsBolt = context.getExecutorsBolt();
+    this.pipelineOptions =
+        this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
+
+    initService(context);
+
+    // Side inputs setup
+    if (sideInputs != null && !sideInputs.isEmpty()) {
+      pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
+      watermarkHoldTag =
+          StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
+      pushbackStateInternals = new JStormStateInternals(
+          null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+      sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
+      runner = getDoFnRunner();
+      pushbackRunner =
+          SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
+    } else {
+      runner = getDoFnRunner();
+    }
+
+    // Process user's setup
+    doFnInvoker = DoFnInvokers.invokerFor(doFn);
+    doFnInvoker.invokeSetup();
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
+        tag, mainInputTag, sideInputs, elem.getValue()));
+    if (mainInputTag.equals(tag)) {
+      processMainInput(elem);
+    } else {
+      processSideInput(tag, elem);
+    }
+  }
+
+  protected <T> void processMainInput(WindowedValue<T> elem) {
+    if (sideInputs.isEmpty()) {
+      runner.processElement((WindowedValue<InputT>) elem);
+    } else {
+      Iterable<WindowedValue<InputT>> justPushedBack =
+          pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
+      BagState<WindowedValue<InputT>> pushedBack =
+          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+
+      Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+      for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
+        if (pushedBackValue.getTimestamp().isBefore(min)) {
+          min = pushedBackValue.getTimestamp();
+        }
+        min = earlier(min, pushedBackValue.getTimestamp());
+        pushedBack.add(pushedBackValue);
+      }
+      pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
+    }
+  }
+
+  protected void processSideInput(TupleTag tag, WindowedValue elem) {
+    LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
+
+    PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
+    sideInputHandler.addSideInputValue(sideInputView, elem);
+
+    BagState<WindowedValue<InputT>> pushedBack =
+        pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+
+    List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
+
+    Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
+    if (pushedBackInputs != null) {
+      for (WindowedValue<InputT> input : pushedBackInputs) {
+
+        Iterable<WindowedValue<InputT>> justPushedBack =
+            pushbackRunner.processElementInReadyWindows(input);
+        Iterables.addAll(newPushedBack, justPushedBack);
+      }
+    }
+    pushedBack.clear();
+
+    Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+    for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
+      min = earlier(min, pushedBackValue.getTimestamp());
+      pushedBack.add(pushedBackValue);
+    }
+
+    WatermarkHoldState watermarkHold =
+        pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+    // TODO: clear-then-add is not thread-safe.
+    watermarkHold.clear();
+    watermarkHold.add(min);
+  }
+
+  /**
+   * Process all pushed back elements when receiving watermark with max timestamp.
+   */
+  public void processAllPushBackElements() {
+    if (sideInputs != null && !sideInputs.isEmpty()) {
+      BagState<WindowedValue<InputT>> pushedBackElements =
+          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+      if (pushedBackElements != null) {
+        for (WindowedValue<InputT> elem : pushedBackElements.read()) {
+          LOG.info("Process pushback elem={}", elem);
+          runner.processElement(elem);
+        }
+        pushedBackElements.clear();
+      }
+
+      WatermarkHoldState watermarkHold =
+          pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+      watermarkHold.clear();
+      watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
+    }
+  }
+
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    StateNamespace namespace = timerData.getNamespace();
+    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+    BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
+    if (pushbackRunner != null) {
+      pushbackRunner.onTimer(
+          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+    } else {
+      runner.onTimer(
+          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+    }
+  }
+
+  @Override
+  public void cleanup() {
+    doFnInvoker.invokeTeardown();
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
+
+  private Instant earlier(Instant left, Instant right) {
+    return left.isBefore(right) ? left : right;
+  }
+
+  public void startBundle() {
+    if (pushbackRunner != null) {
+      pushbackRunner.startBundle();
+    } else {
+      runner.startBundle();
+    }
+  }
+
+  public void finishBundle() {
+    if (pushbackRunner != null) {
+      pushbackRunner.finishBundle();
+    } else {
+      runner.finishBundle();
+    }
+  }
+
+  public void setInternalDoFnExecutorId(int id) {
+    this.internalDoFnExecutorId = id;
+  }
+
+  public int getInternalDoFnExecutorId() {
+    return internalDoFnExecutorId;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnRunnerWithMetrics.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnRunnerWithMetrics.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnRunnerWithMetrics.java
new file mode 100644
index 0000000..f614f1c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnRunnerWithMetrics.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.Closeable;
+import java.io.IOException;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.sdk.metrics.MetricsContainer;
+import org.apache.beam.sdk.metrics.MetricsEnvironment;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.joda.time.Instant;
+
+/**
+ * DoFnRunner decorator which registers {@link MetricsContainer}.
+ */
+class DoFnRunnerWithMetrics<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
+
+  private final String stepName;
+  private final DoFnRunner<InputT, OutputT> delegate;
+  private final MetricsReporter metricsReporter;
+
+  DoFnRunnerWithMetrics(
+      String stepName,
+      DoFnRunner<InputT, OutputT> delegate,
+      MetricsReporter metricsReporter) {
+    this.stepName = checkNotNull(stepName, "stepName");
+    this.delegate = checkNotNull(delegate, "delegate");
+    this.metricsReporter = checkNotNull(metricsReporter, "metricsReporter");
+  }
+
+  @Override
+  public void startBundle() {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.startBundle();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void processElement(WindowedValue<InputT> elem) {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.processElement(elem);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void onTimer(
+      String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.onTimer(timerId, window, timestamp, timeDomain);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void finishBundle() {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.finishBundle();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    metricsReporter.updateMetrics();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
new file mode 100644
index 0000000..145b224
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * An executor is a basic executable unit in a JStorm task.
+ */
+interface Executor extends Serializable {
+  /**
+   * Initialization during runtime, before any element is processed.
+   */
+  void init(ExecutorContext context);
+
+  /**
+   * Processes one windowed element arriving on the given input tag.
+   *
+   * @param tag identifies which upstream output the element came from
+   * @param elem the element together with its window/timestamp/pane metadata
+   */
+  <T> void process(TupleTag<T> tag, WindowedValue<T> elem);
+
+  /** Releases runtime resources; called when the enclosing bolt shuts down. */
+  void cleanup();
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorContext.java
new file mode 100644
index 0000000..487db35
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorContext.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import backtype.storm.task.TopologyContext;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.google.auto.value.AutoValue;
+
+/**
+ * Runtime context shared by the executors of a single {@link ExecutorsBolt}:
+ * the JStorm topology context, the owning bolt, and its key-value store manager.
+ */
+@AutoValue
+abstract class ExecutorContext {
+  public static ExecutorContext of(
+      TopologyContext topologyContext,
+      ExecutorsBolt bolt,
+      IKvStoreManager kvStoreManager) {
+    // Argument order must match the abstract getters below (AutoValue positional ctor).
+    return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
+  }
+
+  public abstract TopologyContext getTopologyContext();
+
+  public abstract ExecutorsBolt getExecutorsBolt();
+
+  public abstract IKvStoreManager getKvStoreManager();
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
new file mode 100644
index 0000000..ef12db8
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
@@ -0,0 +1,338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBatchBolt;
+import backtype.storm.tuple.ITupleExt;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.KvStoreManagerFactory;
+import com.alibaba.jstorm.cluster.Common;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ExecutorsBolt is a JStorm Bolt composited with several executors chained in a sub-DAG.
+ */
+public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
+  private static final long serialVersionUID = -7751043327801735211L;
+
+  private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
+
+  protected ExecutorContext executorContext;
+
+  protected TimerService timerService;
+
+  // map from input tag to executor inside bolt
+  protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
+  // set of all output tags that will be emit outside bolt
+  protected final Set<TupleTag> outputTags = Sets.newHashSet();
+  protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
+  protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
+  protected int internalDoFnExecutorId = 1;
+  protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
+
+  protected OutputCollector collector;
+
+  protected boolean isStatefulBolt = false;
+
+  protected KryoSerializer<WindowedValue> serializer;
+
+  public ExecutorsBolt() {
+
+  }
+
+  /** Marks this bolt as stateful; prepare() then uses the monitored KV store. */
+  public void setStatefulBolt(boolean isStateful) {
+    isStatefulBolt = isStateful;
+  }
+
+  /** Registers an executor to receive elements arriving on {@code inputTag}. */
+  public void addExecutor(TupleTag inputTag, Executor executor) {
+    inputTagToExecutor.put(
+        checkNotNull(inputTag, "inputTag"),
+        checkNotNull(executor, "executor"));
+  }
+
+  public Map<TupleTag, Executor> getExecutors() {
+    return inputTagToExecutor;
+  }
+
+  /** Assigns DoFn executors a bolt-local id, used to scope their state and timers. */
+  public void registerExecutor(Executor executor) {
+    if (executor instanceof DoFnExecutor) {
+      DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
+      idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
+      doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
+      internalDoFnExecutorId++;
+    }
+  }
+
+  public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
+    return idToDoFnExecutor;
+  }
+
+  /** Declares a tag produced inside this bolt's executor chain. */
+  public void addOutputTags(TupleTag tag) {
+    outputTags.add(tag);
+  }
+
+  /** Declares a tag whose elements are emitted outside this bolt. */
+  public void addExternalOutputTag(TupleTag<?> tag) {
+    externalOutputTags.add(tag);
+  }
+
+  public Set<TupleTag> getOutputTags() {
+    return outputTags;
+  }
+
+  public ExecutorContext getExecutorContext() {
+    return executorContext;
+  }
+
+  /**
+   * JStorm lifecycle hook: creates the per-task KV store manager (monitored when
+   * the bolt is stateful), builds the timer service, and initializes every
+   * distinct executor chained into this bolt.
+   */
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    LOG.info("Start to prepare for task-{}", context.getThisTaskId());
+    try {
+      this.collector = collector;
+
+      // init kv store manager; store name/path are derived from the task id so
+      // each task gets its own state directory under the worker dir
+      String storeName = String.format("task-%d", context.getThisTaskId());
+      String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+      IKvStoreManager kvStoreManager = isStatefulBolt
+              ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(
+              context, storeName, stateStorePath, isStatefulBolt)
+              : KvStoreManagerFactory.getKvStoreManager(
+              stormConf, storeName, stateStorePath, isStatefulBolt);
+      this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
+
+      // init time service
+      timerService = initTimerService();
+
+      // init all internal executors (deduplicated: one executor may serve many tags)
+      for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+        executor.init(executorContext);
+        if (executor instanceof DoFnExecutor) {
+          doFnExecutors.add((DoFnExecutor) executor);
+        }
+      }
+
+      this.serializer = new KryoSerializer<WindowedValue>(stormConf);
+
+      LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
+      LOG.info("inputTagToExecutor={}", inputTagToExecutor);
+      LOG.info("outputTags={}", outputTags);
+      LOG.info("externalOutputTags={}", externalOutputTags);
+      LOG.info("doFnExecutors={}", doFnExecutors);
+    } catch (IOException e) {
+      throw new RuntimeException("Failed to prepare executors bolt", e);
+    }
+  }
+
+  /**
+   * Creates and initializes the {@link TimerService} for this bolt.
+   *
+   * <p>The service tracks input watermarks per upstream task, so system
+   * components (e.g. the acker) are filtered out of the source task list.
+   */
+  public TimerService initTimerService() {
+    TopologyContext context = executorContext.getTopologyContext();
+    List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
+        .transformAndConcat(
+            new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
+              @Override
+              public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
+                if (Common.isSystemComponent(value.getKey())) {
+                  // Type-safe empty list instead of the raw Collections.EMPTY_LIST.
+                  return Collections.<Integer>emptyList();
+                } else {
+                  return value.getValue();
+                }
+              }
+            })
+        .toList();
+    TimerService ret = new TimerServiceImpl(executorContext);
+    ret.init(tasks);
+    return ret;
+  }
+
+  /**
+   * Processes one incoming batch tuple. Tuples on the watermark stream advance
+   * the watermark; all other tuples carry element batches, which are processed
+   * inside a startBundle/finishBundle pair so DoFns see proper bundles.
+   */
+  @Override
+  public void execute(Tuple input) {
+    // process a batch
+    String streamId = input.getSourceStreamId();
+    ITupleExt tuple = (ITupleExt) input;
+    Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
+    if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
+      while (valueIterator.hasNext()) {
+        processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
+      }
+    } else {
+      doFnStartBundle();
+      while (valueIterator.hasNext()) {
+        processElement(valueIterator.next(), streamId);
+      }
+      doFnFinishBundle();
+    }
+  }
+
+  /**
+   * Advances the input watermark for {@code sourceTask}. If the combined input
+   * watermark moved, fires eligible timers (inside a bundle) and, on reaching
+   * the final watermark, flushes all pushed-back side-input elements. Finally
+   * forwards the new output watermark downstream when this bolt has external
+   * outputs.
+   */
+  private void processWatermark(long watermarkTs, int sourceTask) {
+    long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
+    LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
+        (new Instant(watermarkTs)).toDateTime(),
+        sourceTask,
+        (new Instant(newWaterMark)).toDateTime());
+    if (newWaterMark != 0) {
+      // Some buffered windows are going to be triggered.
+      doFnStartBundle();
+      timerService.fireTimers(newWaterMark);
+
+      // SideInput: a watermark at the max timestamp means no more data will be
+      // received, so process all pushed-back data now.
+      if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
+        for (DoFnExecutor doFnExecutor : doFnExecutors) {
+          doFnExecutor.processAllPushBackElements();
+        }
+      }
+
+      doFnFinishBundle();
+    }
+
+    long currentWaterMark = timerService.currentOutputWatermark();
+    if (!externalOutputTags.isEmpty()) {
+      // Flush pending element batches before the watermark so ordering holds.
+      collector.flush();
+      collector.emit(
+          CommonInstance.BEAM_WATERMARK_STREAM_ID,
+          new Values(currentWaterMark));
+      LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
+    }
+  }
+
+  /** Decodes one tuple's values and routes the element under the stream's tag. */
+  private void processElement(List<Object> values, String streamId) {
+    WindowedValue elem = retrieveWindowedValueFromTupleValue(values);
+    processExecutorElem(new TupleTag(streamId), elem);
+  }
+
+  /**
+   * Routes an element to the executor registered for {@code inputTag} and, if
+   * the tag is also consumed outside this bolt, emits it downstream.
+   *
+   * @param inputTag tag the element arrived on
+   * @param elem the element; a null element is logged and dropped
+   */
+  public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
+    if (elem != null) {
+      // Log only after the null check: elem.getValue() would NPE on a null elem,
+      // defeating the guard below.
+      LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
+      Executor executor = inputTagToExecutor.get(inputTag);
+      if (executor != null) {
+        executor.process(inputTag, elem);
+      }
+      if (externalOutputTags.contains(inputTag)) {
+        emitOutsideBolt(inputTag, elem);
+      }
+    } else {
+      LOG.info("Received null elem for tag={}", inputTag);
+    }
+  }
+
+  /** Cleans up each distinct executor once, then closes the KV store manager. */
+  @Override
+  public void cleanup() {
+    for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+      executor.cleanup();
+    }
+    executorContext.getKvStoreManager().close();
+  }
+
+  /** No component-specific configuration. */
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return null;
+  }
+
+  /** Returns the timer service built in prepare() (or set via setTimerService). */
+  public TimerService timerService() {
+    return timerService;
+  }
+
+  public void setTimerService(TimerService service) {
+    timerService = service;
+  }
+
+  /**
+   * Rebuilds a {@link WindowedValue} from raw tuple fields. A two-field tuple is
+   * a (key, serialized WindowedValue of the value) pair that is re-assembled
+   * into a KV; a one-field tuple is a serialized WindowedValue on its own.
+   */
+  private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
+    if (values.size() <= 1) {
+      return serializer.deserialize((byte[]) values.get(0));
+    }
+    Object key = values.get(0);
+    WindowedValue value = serializer.deserialize((byte[]) values.get(1));
+    return value.withValue(KV.of(key, value.getValue()));
+  }
+
+  /**
+   * Serializes {@code outputValue} and emits it on the tag's stream. Keyed
+   * streams are split into (key, serialized WindowedValue of the value) so
+   * JStorm can group by the key field; a null key is replaced by the literal
+   * string "null" because JStorm silently drops null emit values.
+   */
+  protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
+    LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
+    if (keyedEmit(outputTag.getId())) {
+      KV kv = (KV) outputValue.getValue();
+      byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
+      // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+      if (kv.getKey() == null) {
+        // If key is null, emit "null" string here. Because, null value will be ignored in JStorm.
+        collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
+      } else {
+        collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
+      }
+    } else {
+      byte[] immutableOutputValue = serializer.serialize(outputValue);
+      collector.emit(outputTag.getId(), new Values(immutableOutputValue));
+    }
+  }
+
+  /** Signals startBundle to every DoFn executor before a batch of elements. */
+  private void doFnStartBundle() {
+    for (DoFnExecutor doFnExecutor : doFnExecutors) {
+      doFnExecutor.startBundle();
+    }
+  }
+
+  /** Signals finishBundle to every DoFn executor after a batch of elements. */
+  private void doFnFinishBundle() {
+    for (DoFnExecutor doFnExecutor : doFnExecutors) {
+      doFnExecutor.finishBundle();
+    }
+  }
+
+  /**
+   * Human-readable summary of this bolt: its internal executors and the tags it
+   * emits outside the bolt. Used in topology logging.
+   */
+  @Override
+  public String toString() {
+    List<String> ret = new ArrayList<>();
+    ret.add("internalExecutors");
+    for (Executor executor : inputTagToExecutor.values()) {
+      ret.add(executor.toString());
+    }
+    ret.add("externalOutputTags");
+    for (TupleTag output : externalOutputTags) {
+      ret.add(output.getId());
+    }
+    return Joiner.on('\n').join(ret).concat("\n");
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
new file mode 100644
index 0000000..a64f494
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.Flatten}:
+ * forwards every input element, whatever tag it arrives on, to the single
+ * flattened output tag.
+ *
+ * @param <InputT> element type of the flattened collections
+ */
+class FlattenExecutor<InputT> implements Executor {
+
+  private final String description;
+  private TupleTag mainOutputTag;
+  private ExecutorContext context;
+  private ExecutorsBolt executorsBolt;
+
+  public FlattenExecutor(String description, TupleTag mainTupleTag) {
+    this.description = checkNotNull(description, "description");
+    this.mainOutputTag = mainTupleTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.context = context;
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    // Flatten is a pure merge: re-tag the element with the output tag and re-route.
+    executorsBolt.processExecutorElem(mainOutputTag, elem);
+  }
+
+  @Override
+  public void cleanup() {
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
new file mode 100644
index 0000000..89708df
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Translates a {@link Flatten.PCollections} into a JStorm {@link FlattenExecutor}.
+ *
+ * @param <V> element type of the flattened collections
+ */
+class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
+
+  @Override
+  public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+
+    // Since a new tag is created in PCollectionList, retrieve the real tag here.
+    Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
+    for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
+      PCollection<V> pc = (PCollection<V>) entry.getValue();
+      inputs.putAll(pc.expand());
+    }
+    String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
+    FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
+    context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
new file mode 100644
index 0000000..85c958a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.collect.Lists;
+import java.util.Collections;
+import java.util.List;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+
+/**
+ * Translates a {@link GroupByKey} into a JStorm {@link GroupByWindowExecutor}.
+ *
+ * @param <K> key type
+ * @param <V> value type
+ */
+class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
+  // Pieces of the transform captured during translation; filled in by translateNode().
+  protected PCollection<KV<K, V>> input;
+  protected PCollection<KV<K, Iterable<V>>> output;
+  protected List<TupleTag<?>> inputTags;
+  protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
+  protected List<TupleTag<?>> sideOutputTags;
+  protected List<PCollectionView<?>> sideInputs;
+  protected WindowingStrategy<?, ?> windowingStrategy;
+
+  @Override
+  public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+
+    input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+    output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
+
+    inputTags = userGraphContext.getInputTags();
+    mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
+    sideOutputTags = Lists.newArrayList();
+
+    // GroupByKey itself has no side inputs or side outputs; grouping is driven
+    // by the input's own windowing strategy.
+    sideInputs = Collections.<PCollectionView<?>>emptyList();
+    windowingStrategy = input.getWindowingStrategy();
+
+    GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
+        userGraphContext.getStepName(),
+        description,
+        context,
+        context.getUserGraphContext().getOptions(),
+        windowingStrategy,
+        mainOutputTag,
+        sideOutputTags);
+    context.addTransformExecutor(groupByWindowExecutor);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
new file mode 100644
index 0000000..bf6e1ad
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.common.collect.ImmutableList;
+import java.io.Serializable;
+import java.util.List;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.runners.core.DoFnRunners;
+import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetNewDoFn;
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.runners.core.KeyedWorkItems;
+import org.apache.beam.runners.core.NullSideInputReader;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateInternalsFactory;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.core.SystemReduceFn;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.core.TimerInternalsFactory;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.TranslationContext.UserGraphContext;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.GroupByKey}.
+ * @param <K>
+ * @param <V>
+ */
+class GroupByWindowExecutor<K, V>
+    extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
+  private static final long serialVersionUID = -7563050475488610553L;
+
+  private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
+
+  /** Routes grouped output straight back into the enclosing ExecutorsBolt. */
+  private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
+
+    @Override
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      executorsBolt.processExecutorElem(tag, output);
+    }
+  }
+
+  private KvCoder<K, V> inputKvCoder;
+  private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
+
+  /**
+   * Creates the executor. The GroupAlsoByWindow {@link DoFn} depends on runtime
+   * state/timer internals, so {@code null} placeholders are passed to the
+   * superclass here and the DoFn is built lazily in {@code getDoFnRunner()}.
+   */
+  public GroupByWindowExecutor(
+      String stepName,
+      String description,
+      TranslationContext context,
+      JStormPipelineOptions pipelineOptions,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+    // The DoFn is created at runtime; pass null placeholders to the superclass.
+    super(
+        stepName,
+        description,
+        pipelineOptions,
+        null,
+        null,
+        windowingStrategy,
+        null,
+        null,
+        null,
+        mainTupleTag,
+        sideOutputTags);
+
+    this.outputManager = new GroupByWindowOutputManager();
+    UserGraphContext userGraphContext = context.getUserGraphContext();
+    PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+    this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
+  }
+
+  /**
+   * Builds the runtime GroupAlsoByWindow DoFn, wiring per-key state to this
+   * bolt's KV store manager and per-key timers to the bolt's timer service.
+   */
+  private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
+    final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
+      @Override
+      public StateInternals stateInternalsForKey(K key) {
+        return new JStormStateInternals<K>(
+            key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+      }
+    };
+    TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
+      @Override
+      public TimerInternals timerInternalsForKey(K key) {
+        return new JStormTimerInternals<>(
+            key,
+            GroupByWindowExecutor.this,
+            executorContext.getExecutorsBolt().timerService());
+      }
+    };
+
+    // Buffer all values per key/window until the window fires.
+    reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
+    DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
+        GroupAlsoByWindowViaWindowSetNewDoFn.create(
+            windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
+            (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
+    return doFn;
+  }
+
+  /**
+   * Creates the runner chain: simple runner, wrapped by a late-data-dropping
+   * runner, wrapped by a metrics-reporting runner. Built at runtime because the
+   * underlying DoFn needs live state/timer internals.
+   */
+  @Override
+  protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
+    doFn = getGroupByWindowDoFn();
+
+    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.simpleRunner(
+        this.pipelineOptions,
+        this.doFn,
+        NullSideInputReader.empty(),
+        this.outputManager,
+        this.mainTupleTag,
+        this.sideOutputTags,
+        this.stepContext,
+        this.windowingStrategy);
+
+    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner =
+        DoFnRunners.lateDataDroppingRunner(
+            simpleRunner,
+            this.stepContext,
+            this.windowingStrategy);
+    return new DoFnRunnerWithMetrics<>(
+        stepName, doFnRunner, MetricsReporter.create(metricClient));
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    // For GroupByKey a KV-typed elem is received; convert it to the KeyedWorkItem
+    // that LateDataDroppingDoFnRunner expects before handing it to the runner.
+    KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
+    runner.processElement(elem.withValue(keyedWorkItem));
+  }
+
+  /**
+   * Fires a window timer for {@code key} by feeding a timers-only
+   * {@link KeyedWorkItem} through the runner.
+   */
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    // GroupByWindow timers are always window-scoped.
+    StateNamespace namespace = timerData.getNamespace();
+    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+
+    runner.processElement(
+        WindowedValue.valueInGlobalWindow(
+            KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
+  }
+
+  // NOTE(review): redundant override — delegates unchanged to super.toString().
+  @Override
+  public String toString() {
+    return super.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
new file mode 100644
index 0000000..3e5d52b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.KvStoreIterable;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import javax.annotation.Nullable;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implementation of {@link BagState} in JStorm runner.
+ */
+class JStormBagState<K, T> implements BagState<T> {
+  private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
+
+  @Nullable
+  private final K key;
+  private final StateNamespace namespace;
+  private final IKvStore<ComposedKey, T> kvState;
+  private final IKvStore<ComposedKey, Object> stateInfoKvState;
+  // Index of the next free element slot; doubles as the current element count.
+  private int elemIndex;
+
+  public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
+                        IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
+    this.key = key;
+    this.namespace = checkNotNull(namespace, "namespace");
+    this.kvState = checkNotNull(kvState, "kvState");
+    this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
+
+    // The info store holds the index of the last written element, so the next free
+    // slot is one past it; no entry means the bag is empty.
+    Integer index = (Integer) stateInfoKvState.get(getComposedKey());
+    this.elemIndex = index != null ? index + 1 : 0;
+  }
+
+  @Override
+  public void add(T input) {
+    try {
+      kvState.put(getComposedKey(elemIndex), input);
+      stateInfoKvState.put(getComposedKey(), elemIndex);
+      elemIndex++;
+    } catch (IOException e) {
+      // Wrap the exception itself, not its (possibly null) cause, so no context is lost.
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return new ReadableState<Boolean>() {
+      @Override
+      public Boolean read() {
+        return elemIndex <= 0;
+      }
+
+      @Override
+      public ReadableState<Boolean> readLater() {
+        // TODO: support prefetch.
+        return this;
+      }
+    };
+  }
+
+  @Override
+  public Iterable<T> read() {
+    // Snapshot the in-memory size; each Iterator re-reads the persisted size on creation.
+    return new BagStateIterable(elemIndex);
+  }
+
+  @Override
+  public BagState<T> readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
+
+  @Override
+  public void clear() {
+    try {
+      for (int i = 0; i < elemIndex; i++) {
+        kvState.remove(getComposedKey(i));
+      }
+      stateInfoKvState.remove(getComposedKey());
+      elemIndex = 0;
+    } catch (IOException e) {
+      // Wrap the exception itself, not its (possibly null) cause, so no context is lost.
+      throw new RuntimeException(e);
+    }
+  }
+
+  // Key under which the bag's metadata (last element index) is stored.
+  private ComposedKey getComposedKey() {
+    return ComposedKey.of(key, namespace);
+  }
+
+  // Key under which the element at the given slot is stored.
+  private ComposedKey getComposedKey(int elemIndex) {
+    return ComposedKey.of(key, namespace, elemIndex);
+  }
+
+  /**
+   * Implementation of Bag state Iterable.
+   */
+  private class BagStateIterable implements KvStoreIterable<T> {
+
+    private class BagStateIterator implements Iterator<T> {
+      private final int size;
+      private int cursor = 0;
+
+      BagStateIterator() {
+        // Re-read the persisted last index so the iterator reflects committed state.
+        Integer s = null;
+        try {
+          s = (Integer) stateInfoKvState.get(getComposedKey());
+        } catch (IOException e) {
+          LOG.error("Failed to get elemIndex for key={}", getComposedKey(), e);
+        }
+        this.size = s != null ? s + 1 : 0;
+      }
+
+      @Override
+      public boolean hasNext() {
+        return cursor < size;
+      }
+
+      @Override
+      public T next() {
+        if (cursor >= size) {
+          throw new NoSuchElementException();
+        }
+
+        // On read failure the error is logged and null is returned for this slot.
+        T value = null;
+        try {
+          value = kvState.get(getComposedKey(cursor));
+        } catch (IOException e) {
+          LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor), e);
+        }
+        cursor++;
+        return value;
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException();
+      }
+    }
+
+    private final int size;
+
+    BagStateIterable(int size) {
+      this.size = size;
+    }
+
+    @Override
+    public Iterator<T> iterator() {
+      return new BagStateIterator();
+    }
+
+    @Override
+    public String toString() {
+      return String.format("BagStateIterable: composedKey=%s", getComposedKey());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
new file mode 100644
index 0000000..6bd021f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import javax.annotation.Nullable;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.apache.beam.sdk.transforms.Combine;
+
+/**
+ * JStorm implementation of {@link CombiningState}.
+ */
+class JStormCombiningState<InputT, AccumT, OutputT>
+    implements CombiningState<InputT, AccumT, OutputT> {
+
+  // Backing storage for partial accumulators. Checked non-null in the constructor,
+  // so the former @Nullable annotation was misleading and has been removed.
+  private final BagState<AccumT> accumBagState;
+  private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
+
+  JStormCombiningState(
+      BagState<AccumT> accumBagState,
+      Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+    this.accumBagState = checkNotNull(accumBagState, "accumBagState");
+    this.combineFn = checkNotNull(combineFn, "combineFn");
+  }
+
+  @Override
+  public AccumT getAccum() {
+    // TODO: replacing the accumBagState with the merged accum.
+    return combineFn.mergeAccumulators(accumBagState.read());
+  }
+
+  @Override
+  public void addAccum(AccumT accumT) {
+    accumBagState.add(accumT);
+  }
+
+  @Override
+  public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
+    return combineFn.mergeAccumulators(iterable);
+  }
+
+  @Override
+  public void add(InputT input) {
+    // Each input becomes a single-element accumulator; merging is deferred to read().
+    accumBagState.add(
+        combineFn.addInput(combineFn.createAccumulator(), input));
+  }
+
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return accumBagState.isEmpty();
+  }
+
+  @Override
+  public OutputT read() {
+    // Merge all stored partial accumulators, then extract the combined output.
+    return combineFn.extractOutput(
+        combineFn.mergeAccumulators(accumBagState.read()));
+  }
+
+  @Override
+  public CombiningState<InputT, AccumT, OutputT> readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
+
+  @Override
+  public void clear() {
+    accumBagState.clear();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
new file mode 100644
index 0000000..6a4e376
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.alibaba.jstorm.cache.IKvStore;
+import java.io.IOException;
+import java.util.Map;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implementation of {@link MapState} in JStorm runner.
+ * @param <K>
+ * @param <V>
+ */
+class JStormMapState<K, V> implements MapState<K, V> {
+  private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
+
+  private final K key;
+  private final StateNamespace namespace;
+  private IKvStore<K, V> kvStore;
+
+  public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
+    this.key = key;
+    this.namespace = namespace;
+    this.kvStore = kvStore;
+  }
+
+  @Override
+  public void put(K var1, V var2) {
+    try {
+      kvStore.put(var1, var2);
+    } catch (IOException e) {
+      reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
+    }
+  }
+
+  @Override
+  public ReadableState<V> putIfAbsent(K var1, V var2) {
+    ReadableState<V> ret = null;
+    try {
+      V value = kvStore.get(var1);
+      if (value == null) {
+        kvStore.put(var1, var2);
+        ret = new MapReadableState<>(null);
+      } else {
+        ret = new MapReadableState<>(value);
+      }
+    } catch (IOException e) {
+      reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
+    }
+    return ret;
+  }
+
+  @Override
+  public void remove(K var1) {
+    try {
+      kvStore.remove(var1);
+    } catch (IOException e) {
+      reportError(String.format("Failed to remove key=%s", var1), e);
+    }
+  }
+
+  @Override
+  public ReadableState<V> get(K var1) {
+    ReadableState<V> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState(kvStore.get(var1));
+    } catch (IOException e) {
+      reportError(String.format("Failed to get value for key=%s", var1), e);
+    }
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<K>> keys() {
+    ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.keys());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get keys"), e);
+    }
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<V>> values() {
+    ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.values());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get values"), e);
+    }
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
+    ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.entries());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get values"), e);
+    }
+    return ret;
+  }
+
+  @Override
+  public void clear() {
+    try {
+      Iterable<K> keys = kvStore.keys();
+      kvStore.removeBatch(keys);
+    } catch (IOException e) {
+      reportError(String.format("Failed to clear map state"), e);
+    }
+  }
+
+  private void reportError(String errorInfo, IOException e) {
+    LOG.error(errorInfo, e);
+    throw new RuntimeException(errorInfo);
+  }
+
+  private class MapReadableState<T> implements ReadableState<T> {
+    private T value;
+
+    public MapReadableState(T value) {
+      this.value = value;
+    }
+
+    @Override
+    public T read() {
+      return value;
+    }
+
+    @Override
+    public ReadableState<T> readLater() {
+      return this;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
index 1449a43..298ad32 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
@@ -22,8 +22,6 @@ import com.google.common.collect.Iterables;
 import java.util.List;
 import org.apache.beam.runners.core.construction.PTransformMatchers;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
-import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.runners.PTransformOverride;


[18/53] [abbrv] beam git commit: jstorm-runner: remove top level classes RunnerUtils and SingletonKeyedWorkItem.

Posted by pe...@apache.org.
jstorm-runner: remove top level classes RunnerUtils and SingletonKeyedWorkItem.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/74ceac61
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/74ceac61
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/74ceac61

Branch: refs/heads/jstorm-runner
Commit: 74ceac6173f78c76247b9ea4cb8179ca1ed9f62d
Parents: 8265353
Author: Pei He <pe...@apache.org>
Authored: Fri Jul 14 15:40:23 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../translation/GroupByWindowExecutor.java      | 45 +++++++++++++-
 .../runners/jstorm/translation/RunnerUtils.java | 51 ----------------
 .../translation/SingletonKeyedWorkItem.java     | 62 --------------------
 .../jstorm/translation/TranslationContext.java  | 13 +++-
 4 files changed, 56 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/74ceac61/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
index bf6e1ad..1c858b7 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
@@ -21,6 +21,7 @@ import static com.google.common.base.Preconditions.checkArgument;
 
 import com.google.common.collect.ImmutableList;
 import java.io.Serializable;
+import java.util.Collections;
 import java.util.List;
 import org.apache.beam.runners.core.DoFnRunner;
 import org.apache.beam.runners.core.DoFnRunners;
@@ -152,7 +153,7 @@ class GroupByWindowExecutor<K, V>
      *  For GroupByKey, KV type elem is received. We need to convert the KV elem
      *  into KeyedWorkItem first, which is the expected type in LateDataDroppingDoFnRunner.
      */
-    KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
+    KeyedWorkItem<K, V> keyedWorkItem = toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
     runner.processElement(elem.withValue(keyedWorkItem));
   }
 
@@ -170,4 +171,46 @@ class GroupByWindowExecutor<K, V>
   public String toString() {
     return super.toString();
   }
+
+  private <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> kvElem) {
+    SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
+        kvElem.getValue().getKey(),
+        kvElem.withValue(kvElem.getValue().getValue()));
+    return workItem;
+  }
+
+  private static class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
+
+    final K key;
+    final WindowedValue<ElemT> value;
+
+    private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
+      this.key = key;
+      this.value = value;
+    }
+
+    public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(
+        K key, WindowedValue<ElemT> value) {
+      return new SingletonKeyedWorkItem<>(key, value);
+    }
+
+    @Override
+    public K key() {
+      return key;
+    }
+
+    public WindowedValue<ElemT> value() {
+      return value;
+    }
+
+    @Override
+    public Iterable<TimerInternals.TimerData> timersIterable() {
+      return Collections.EMPTY_LIST;
+    }
+
+    @Override
+    public Iterable<WindowedValue<ElemT>> elementsIterable() {
+      return Collections.singletonList(value);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/74ceac61/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
deleted file mode 100644
index 4f469f3..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/RunnerUtils.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-
-/**
- * Utils for JStorm runner.
- */
-class RunnerUtils {
-  /**
-   * Convert {@link WindowedValue} into {@link KeyedWorkItem}.
-   * @param elem
-   * @return
-   */
-  public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
-    WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
-    SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
-        kvElem.getValue().getKey(),
-        kvElem.withValue(kvElem.getValue().getValue()));
-    return workItem;
-  }
-
-  public static boolean isGroupByKeyExecutor(Executor executor) {
-    if (executor instanceof GroupByWindowExecutor) {
-      return true;
-    } else if (executor instanceof StatefulDoFnExecutor
-            || executor instanceof MultiStatefulDoFnExecutor) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/74ceac61/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
deleted file mode 100644
index b321c76..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/SingletonKeyedWorkItem.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import java.util.Collections;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.util.WindowedValue;
-
-/**
- * Singleton keyed word item.
- * @param <K>
- * @param <ElemT>
- */
-class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
-
-  final K key;
-  final WindowedValue<ElemT> value;
-
-  private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
-    this.key = key;
-    this.value = value;
-  }
-
-  public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
-    return new SingletonKeyedWorkItem<K, ElemT>(key, value);
-  }
-
-  @Override
-  public K key() {
-    return key;
-  }
-
-  public WindowedValue<ElemT> value() {
-    return value;
-  }
-
-  @Override
-  public Iterable<TimerInternals.TimerData> timersIterable() {
-    return Collections.EMPTY_LIST;
-  }
-
-  @Override
-  public Iterable<WindowedValue<ElemT>> elementsIterable() {
-    return Collections.singletonList(value);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/74ceac61/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index b84fd4a..e25f211 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -158,7 +158,7 @@ public class TranslationContext {
      * d) For the purpose of performance to reduce the side effects between multiple streams which
      *    is output to same executor, a new bolt will be created.
      */
-    if (RunnerUtils.isGroupByKeyExecutor(executor)) {
+    if (isGroupByKeyExecutor(executor)) {
       bolt = new ExecutorsBolt();
       name = executionGraphContext.registerBolt(bolt);
       isGBK = true;
@@ -435,4 +435,15 @@ public class TranslationContext {
       return id++;
     }
   }
+
+  private boolean isGroupByKeyExecutor(Executor executor) {
+    if (executor instanceof GroupByWindowExecutor) {
+      return true;
+    } else if (executor instanceof StatefulDoFnExecutor
+        || executor instanceof MultiStatefulDoFnExecutor) {
+      return true;
+    } else {
+      return false;
+    }
+  }
 }


[41/53] [abbrv] beam git commit: jstorm-runner: Fixup for review comments

Posted by pe...@apache.org.
jstorm-runner: Fixup for review comments


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/90ed2ef3
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/90ed2ef3
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/90ed2ef3

Branch: refs/heads/jstorm-runner
Commit: 90ed2ef344d19ca730429e9eb7c71779f995fc47
Parents: 6078cbc
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Mon Aug 14 16:20:03 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 .../runners/jstorm/JStormPipelineOptions.java   |  12 +--
 .../beam/runners/jstorm/JStormRunner.java       |  21 ++--
 .../beam/runners/jstorm/JStormRunnerResult.java |  21 ++--
 .../beam/runners/jstorm/TestJStormRunner.java   |  19 +++-
 .../serialization/JavaUtilsSerializer.java      |   3 +-
 .../translation/BoundedSourceTranslator.java    |   4 +-
 .../jstorm/translation/DoFnExecutor.java        |  27 +++--
 .../runners/jstorm/translation/Executor.java    |   6 ++
 .../jstorm/translation/ExecutorsBolt.java       |   8 +-
 .../jstorm/translation/FlattenExecutor.java     |   1 -
 .../jstorm/translation/FlattenTranslator.java   |  23 ++--
 .../translation/GroupByKeyTranslator.java       |  12 ---
 .../translation/GroupByWindowExecutor.java      |  12 ---
 .../translation/JStormStateInternals.java       |  29 +++--
 .../jstorm/translation/MetricsReporter.java     |   2 -
 .../translation/MultiOutputDoFnExecutor.java    |  22 +---
 .../translation/MultiStatefulDoFnExecutor.java  |   5 +-
 .../translation/ParDoBoundMultiTranslator.java  |  14 +--
 .../translation/ParDoBoundTranslator.java       | 108 -------------------
 .../translation/StatefulDoFnExecutor.java       |   1 -
 .../jstorm/translation/TimerService.java        |   2 +-
 .../jstorm/translation/TimerServiceImpl.java    |   8 +-
 .../jstorm/translation/TransformTranslator.java |  16 ++-
 .../jstorm/translation/TranslationContext.java  |  18 +++-
 .../jstorm/translation/TranslatorRegistry.java  |   1 -
 .../translation/UnboundedSourceSpout.java       |   8 +-
 .../jstorm/translation/ViewTranslator.java      |   4 +-
 .../translation/WindowAssignExecutor.java       |   2 -
 .../translation/JStormStateInternalsTest.java   |   2 +-
 29 files changed, 141 insertions(+), 270 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
index 114877a..e494757 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
@@ -36,8 +36,8 @@ public interface JStormPipelineOptions extends PipelineOptions {
 
   @Description("Executing time(sec) of topology on local mode. Default is 1min.")
   @Default.Long(60)
-  Long getLocalModeExecuteTime();
-  void setLocalModeExecuteTime(Long time);
+  Long getLocalModeExecuteTimeSec();
+  void setLocalModeExecuteTimeSec(Long time);
 
   @Description("Worker number of topology")
   @Default.Integer(1)
@@ -46,8 +46,8 @@ public interface JStormPipelineOptions extends PipelineOptions {
 
   @Description("Global parallelism number of a component")
   @Default.Integer(1)
-  Integer getParallelismNumber();
-  void setParallelismNumber(Integer number);
+  Integer getParallelism();
+  void setParallelism(Integer number);
 
   @Description("System topology config of JStorm")
   @Default.InstanceFactory(DefaultMapValueFactory.class)
@@ -61,8 +61,8 @@ public interface JStormPipelineOptions extends PipelineOptions {
 
   @Description("Parallelism number of a specified composite PTransform")
   @Default.InstanceFactory(DefaultMapValueFactory.class)
-  Map getParallelismNumMap();
-  void setParallelismNumMap(Map parallelismNumMap);
+  Map getParallelismMap();
+  void setParallelismMap(Map parallelismNumMap);
 
   /**
    * Default value factory for topology configuration of JStorm.

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 47de42c..21a8fae 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -79,15 +79,15 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
   }
 
   public static JStormRunner fromOptions(PipelineOptions options) {
-    JStormPipelineOptions pipelineOptions = PipelineOptionsValidator.validate(
-        JStormPipelineOptions.class, options);
+    JStormPipelineOptions pipelineOptions =
+        PipelineOptionsValidator.validate(JStormPipelineOptions.class, options);
     return new JStormRunner(pipelineOptions);
   }
 
   /**
-   * convert pipeline options to storm configuration format.
+   * Convert pipeline options to JStorm configuration format.
    * @param options
-   * @return
+   * @return JStorm configuration
    */
   private Config convertPipelineOptionsToConfig(JStormPipelineOptions options) {
     Config config = new Config();
@@ -103,6 +103,8 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
 
     // Setup config for runtime env
     config.put("worker.external", "beam");
+    // We use "com.alibaba.jstorm.transactional" API for "at least once" and "exactly once",
+    // so we don't need acker task for beam job any more, and set related number to 0.
     config.put("topology.acker.executors", 0);
 
     // Register serializers of Kryo
@@ -271,7 +273,7 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
         LocalCluster localCluster = LocalCluster.getInstance();
         localCluster.submitTopology(topologyName, config, topology);
         return JStormRunnerResult.local(
-            topologyName, config, localCluster, options.getLocalModeExecuteTime());
+            topologyName, config, localCluster, options.getLocalModeExecuteTimeSec());
       } else {
         StormSubmitter.submitTopology(topologyName, config, topology);
         return null;
@@ -298,11 +300,12 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
     TopologyBuilder builder =
         isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
 
-    int parallelismNumber = options.getParallelismNumber();
+    int parallelismNumber = options.getParallelism();
     Map<String, UnboundedSourceSpout> spouts = context.getSpouts();
-    for (String id : spouts.keySet()) {
-      IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
-      builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));
+    for (Map.Entry<String, UnboundedSourceSpout> entry : spouts.entrySet()) {
+      IRichSpout spout = getSpout(isExactlyOnce, entry.getValue());
+      builder.setSpout(
+          entry.getKey(), spout, getParallelismNum(entry.getValue(), parallelismNumber));
     }
 
     HashMap<String, BoltDeclarer> declarers = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
index 797c899..4b1850e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
@@ -38,7 +38,7 @@ public abstract class JStormRunnerResult implements PipelineResult {
       Config config,
       LocalCluster localCluster,
       long localModeExecuteTimeSecs) {
-    return new LocalStormPipelineResult(
+    return new LocalJStormPipelineResult(
         topologyName, config, localCluster, localModeExecuteTimeSecs);
   }
 
@@ -62,12 +62,12 @@ public abstract class JStormRunnerResult implements PipelineResult {
     return topologyName;
   }
 
-  private static class LocalStormPipelineResult extends JStormRunnerResult {
+  private static class LocalJStormPipelineResult extends JStormRunnerResult {
 
     private LocalCluster localCluster;
     private long localModeExecuteTimeSecs;
 
-    LocalStormPipelineResult(
+    LocalJStormPipelineResult(
         String topologyName,
         Config config,
         LocalCluster localCluster,
@@ -78,7 +78,6 @@ public abstract class JStormRunnerResult implements PipelineResult {
 
     @Override
     public State cancel() throws IOException {
-      //localCluster.deactivate(getTopologyName());
       localCluster.killTopology(getTopologyName());
       localCluster.shutdown();
       JStormUtils.sleepMs(1000);
@@ -87,12 +86,7 @@ public abstract class JStormRunnerResult implements PipelineResult {
 
     @Override
     public State waitUntilFinish(Duration duration) {
-      return waitUntilFinish();
-    }
-
-    @Override
-    public State waitUntilFinish() {
-      JStormUtils.sleepMs(localModeExecuteTimeSecs * 1000);
+      JStormUtils.sleepMs(duration.getMillis());
       try {
         return cancel();
       } catch (IOException e) {
@@ -101,8 +95,13 @@ public abstract class JStormRunnerResult implements PipelineResult {
     }
 
     @Override
+    public State waitUntilFinish() {
+      return waitUntilFinish(Duration.standardSeconds(localModeExecuteTimeSecs));
+    }
+
+    @Override
     public MetricResults metrics() {
-      return null;
+      throw new UnsupportedOperationException("This method is not yet supported.");
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index 21a58e3..c9990e4 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -50,13 +50,21 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
     return new TestJStormRunner(options.as(JStormPipelineOptions.class));
   }
 
+  // waiting time when the job has assertions
+  private static final int ASSERTION_WAITING_TIME_MS = 20 * 1000;
+  // waiting time when the job has no assertions
+  private static final int RESULT_WAITING_TIME_MS = 5 * 1000;
+  private static final int RESULT_CHECK_INTERVAL_MS = 500;
+
   private final JStormRunner stormRunner;
   private final JStormPipelineOptions options;
 
   private TestJStormRunner(JStormPipelineOptions options) {
     this.options = options;
     Map conf = Maps.newHashMap();
-    //conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
+    // The default state backend is RocksDB; for users who cannot run RocksDB in a local testing
+    // env, the following config can be used to switch the state backend to memory.
+    // conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
     options.setTopologyConfig(conf);
     options.setLocalMode(true);
     stormRunner = JStormRunner.fromOptions(checkNotNull(options, "options"));
@@ -73,8 +81,9 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
       LOG.info("Running JStorm job {} with {} expected assertions.",
                result.getTopologyName(), numberOfAssertions);
 
-      int maxTimeoutSec = numberOfAssertions > 0 ? 20 : 5;
-      for (int waitTime = 0; waitTime <= maxTimeoutSec * 1000; ) {
+      int maxTimeoutMs =
+          numberOfAssertions > 0 ? ASSERTION_WAITING_TIME_MS : RESULT_WAITING_TIME_MS;
+      for (int waitTime = 0; waitTime <= maxTimeoutMs; ) {
         Optional<Boolean> success = numberOfAssertions > 0
                 ? checkForPAssertSuccess(numberOfAssertions) : Optional.<Boolean>absent();
         Exception taskExceptionRec = TaskReportErrorAndDie.getExceptionRecord();
@@ -86,8 +95,8 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
           LOG.info("Exception was found.", taskExceptionRec);
           throw new RuntimeException(taskExceptionRec.getCause());
         } else {
-          JStormUtils.sleepMs(500);
-          waitTime += 500;
+          JStormUtils.sleepMs(RESULT_CHECK_INTERVAL_MS);
+          waitTime += RESULT_CHECK_INTERVAL_MS;
         }
       }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
index 5df686c..fa46fdb 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
@@ -45,7 +45,7 @@ import java.util.TreeSet;
 public class JavaUtilsSerializer {
 
   /**
-   * Specific {@link Kryo} serializer for {@link java.util.Collections.SingletonList}.
+   * Specific {@link Kryo} serializer for {@code java.util.Collections.SingletonList}.
    */
   public static class CollectionsSingletonListSerializer extends Serializer<List<?>> {
     public CollectionsSingletonListSerializer() {
@@ -222,7 +222,6 @@ public class JavaUtilsSerializer {
    * @see Collections#unmodifiableSortedMap(SortedMap)
    */
   private static void registerUnmodifableCollectionSerializers(Config config) {
-    UnmodifiableCollection.values();
     for (final UnmodifiableCollection item : UnmodifiableCollection.values()) {
       config.registerSerialization(item.type, UnmodifiableCollectionsSerializer.class);
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
index 53555c9..77d0823 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
@@ -24,9 +24,7 @@ import org.apache.beam.sdk.values.TaggedPValue;
 import org.apache.beam.sdk.values.TupleTag;
 
 /**
- * Translates a {@link Read.Bounded} into a Storm spout.
- *
- * @param <T>
+ * Translates a {@link Read.Bounded} into a JStorm spout.
  */
 class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
 

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
index 2148f34..72c386a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
@@ -71,9 +71,15 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
   /**
    * Implements {@link OutputManager} in a DoFn executor.
    */
-  public class DoFnExecutorOutputManager implements OutputManager, Serializable {
+  protected static class DoFnExecutorOutputManager implements OutputManager, Serializable {
     private static final long serialVersionUID = -661113364735206170L;
 
+    private ExecutorsBolt executorsBolt;
+
+    public DoFnExecutorOutputManager(ExecutorsBolt executorsBolt) {
+      this.executorsBolt = executorsBolt;
+    }
+
     @Override
     public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
       executorsBolt.processExecutorElem(tag, output);
@@ -97,23 +103,23 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
 
   protected DoFn<InputT, OutputT> doFn;
   protected final Coder<WindowedValue<InputT>> inputCoder;
-  protected DoFnInvoker<InputT, OutputT> doFnInvoker;
-  protected OutputManager outputManager;
+  protected transient DoFnInvoker<InputT, OutputT> doFnInvoker;
+  protected transient OutputManager outputManager;
   protected WindowingStrategy<?, ?> windowingStrategy;
   protected final TupleTag<InputT> mainInputTag;
   protected Collection<PCollectionView<?>> sideInputs;
-  protected SideInputHandler sideInputHandler;
+  protected transient SideInputHandler sideInputHandler;
   protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
 
   // Initialize during runtime
-  protected ExecutorContext executorContext;
+  protected transient ExecutorContext executorContext;
   protected ExecutorsBolt executorsBolt;
-  protected TimerInternals timerInternals;
+  protected transient TimerInternals timerInternals;
   protected transient StateInternals pushbackStateInternals;
   protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
   protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
   protected transient IKvStoreManager kvStoreManager;
-  protected DefaultStepContext stepContext;
+  protected transient DefaultStepContext stepContext;
   protected transient MetricClient metricClient;
 
   public DoFnExecutor(
@@ -133,7 +139,6 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
     this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
     this.doFn = doFn;
     this.inputCoder = inputCoder;
-    this.outputManager = new DoFnExecutorOutputManager();
     this.windowingStrategy = windowingStrategy;
     this.mainInputTag = mainInputTag;
     this.sideInputs = sideInputs;
@@ -174,6 +179,7 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
     this.executorsBolt = context.getExecutorsBolt();
     this.pipelineOptions =
         this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
+    this.outputManager = new DoFnExecutorOutputManager(executorsBolt);
 
     initService(context);
 
@@ -199,8 +205,6 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
 
   @Override
   public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-    LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
-        tag, mainInputTag, sideInputs, elem.getValue()));
     if (mainInputTag.equals(tag)) {
       processMainInput(elem);
     } else if (sideInputTagToView.containsKey(tag)) {
@@ -213,6 +217,7 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
   }
 
   protected <T> void processMainInput(WindowedValue<T> elem) {
+    LOG.debug(String.format("Main input: tag=%s, elem=%s", mainInputTag, elem));
     if (sideInputs.isEmpty()) {
       runner.processElement((WindowedValue<InputT>) elem);
     } else {
@@ -234,7 +239,7 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
   }
 
   protected void processSideInput(TupleTag tag, WindowedValue elem) {
-    LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
+    LOG.debug(String.format("Side inputs: tag=%s, elem=%s.", tag, elem));
 
     PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
     sideInputHandler.addSideInputValue(sideInputView, elem);

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
index 8812988..fd7af7d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
@@ -30,7 +30,13 @@ public interface Executor extends Serializable {
    */
   void init(ExecutorContext context);
 
+  /**
+   * Process an element from the "tag" stream.
+   */
   <T> void process(TupleTag<T> tag, WindowedValue<T> elem);
 
+  /**
+   * Cleanup when task is shutdown.
+   */
   void cleanup();
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
index f8e09be..449ecb5 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
@@ -58,9 +58,9 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
 
   private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
 
-  protected ExecutorContext executorContext;
+  protected transient ExecutorContext executorContext;
 
-  protected TimerService timerService;
+  protected transient TimerService timerService;
 
   // map from input tag to executor inside bolt
   protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
@@ -73,7 +73,7 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
   protected int internalDoFnExecutorId = 1;
   protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
 
-  protected OutputCollector collector;
+  protected transient OutputCollector collector;
 
   protected boolean isStatefulBolt = false;
 
@@ -265,8 +265,8 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
   }
 
   public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
-    LOG.debug("ProcessExecutorElem: value={} from tag={}", elem.getValue(), inputTag);
     if (elem != null) {
+      LOG.debug("ProcessExecutorElem: value={} from tag={}", elem.getValue(), inputTag);
       Executor executor = inputTagToExecutor.get(inputTag);
       if (executor != null) {
         executor.process(inputTag, elem);

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
index 928fa24..9d4184c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
@@ -26,7 +26,6 @@ import org.apache.beam.sdk.values.TupleTag;
 
 /**
  * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.Flatten}.
- * @param <InputT>
  */
 class FlattenExecutor<InputT> implements Executor {
 

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
index ebe8bc3..62621d0 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -30,7 +30,6 @@ import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TaggedPValue;
 import org.apache.beam.sdk.values.TupleTag;
@@ -46,20 +45,18 @@ class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollecti
   public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
     TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
 
-    // Since a new tag is created in PCollectionList, retrieve the real tag here.
+    // Flatten supports consuming multiple copies of the same PCollection, so we need to
+    // record the copy number here.
     Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
     Map<TupleTag<?>, Integer> tagToCopyNum = Maps.newHashMap();
-    for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
-      PCollection<V> pc = (PCollection<V>) entry.getValue();
-      //inputs.putAll(pc.expand());
-      for (Map.Entry<TupleTag<?>, PValue> entry1 : pc.expand().entrySet()) {
-        if (inputs.containsKey(entry1.getKey())) {
-          int copyNum = tagToCopyNum.get(entry1.getKey());
-          tagToCopyNum.put(entry1.getKey(), ++copyNum);
-        } else {
-          inputs.put(entry1.getKey(), entry1.getValue());
-          tagToCopyNum.put(entry1.getKey(), 1);
-        }
+    for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getTransformInputs().entrySet()) {
+      TupleTag tag = userGraphContext.findTupleTag(entry.getValue());
+      if (inputs.containsKey(tag)) {
+        int copyNum = tagToCopyNum.get(tag);
+        tagToCopyNum.put(tag, ++copyNum);
+      } else {
+        inputs.put(tag, entry.getValue());
+        tagToCopyNum.put(tag, 1);
       }
     }
     String description = describeTransform(transform, inputs, userGraphContext.getOutputs());

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
index 85c958a..02f42bd 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByKeyTranslator.java
@@ -18,28 +18,21 @@
 package org.apache.beam.runners.jstorm.translation;
 
 import com.google.common.collect.Lists;
-import java.util.Collections;
 import java.util.List;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
 /**
  * Translates a {@link GroupByKey} to a JStorm {@link GroupByWindowExecutor}.
- * @param <K>
- * @param <V>
  */
 class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
   // information of transform
   protected PCollection<KV<K, V>> input;
-  protected PCollection<KV<K, Iterable<V>>> output;
-  protected List<TupleTag<?>> inputTags;
   protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
   protected List<TupleTag<?>> sideOutputTags;
-  protected List<PCollectionView<?>> sideInputs;
   protected WindowingStrategy<?, ?> windowingStrategy;
 
   @Override
@@ -49,13 +42,8 @@ class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<
         describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
 
     input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-    output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
-
-    inputTags = userGraphContext.getInputTags();
     mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
     sideOutputTags = Lists.newArrayList();
-
-    sideInputs = Collections.<PCollectionView<?>>emptyList();
     windowingStrategy = input.getWindowingStrategy();
 
     GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
index 1c858b7..cae1bc3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/GroupByWindowExecutor.java
@@ -20,7 +20,6 @@ package org.apache.beam.runners.jstorm.translation;
 import static com.google.common.base.Preconditions.checkArgument;
 
 import com.google.common.collect.ImmutableList;
-import java.io.Serializable;
 import java.util.Collections;
 import java.util.List;
 import org.apache.beam.runners.core.DoFnRunner;
@@ -51,8 +50,6 @@ import org.slf4j.LoggerFactory;
 
 /**
  * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.GroupByKey}.
- * @param <K>
- * @param <V>
  */
 class GroupByWindowExecutor<K, V>
     extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
@@ -60,14 +57,6 @@ class GroupByWindowExecutor<K, V>
 
   private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
 
-  private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
-
-    @Override
-    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      executorsBolt.processExecutorElem(tag, output);
-    }
-  }
-
   private KvCoder<K, V> inputKvCoder;
   private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
 
@@ -92,7 +81,6 @@ class GroupByWindowExecutor<K, V>
         mainTupleTag,
         sideOutputTags);
 
-    this.outputManager = new GroupByWindowOutputManager();
     UserGraphContext userGraphContext = context.getUserGraphContext();
     PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
     this.inputKvCoder = (KvCoder<K, V>) input.getCoder();

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index 292b771..e2139d8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -180,12 +180,8 @@ class JStormStateInternals<K> implements StateInternals {
               kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
 
           Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
-              new BinaryCombineFn<Instant>() {
-                @Override
-                public Instant apply(Instant left, Instant right) {
-                  return timestampCombiner.combine(left, right);
-                }
-              };
+              new WatermarkCombineFn(timestampCombiner);
+
           return new JStormWatermarkHoldState(
               id, spec, namespace,
               new JStormCombiningState<>(
@@ -203,6 +199,19 @@ class JStormStateInternals<K> implements StateInternals {
     });
   }
 
+  private static class WatermarkCombineFn extends BinaryCombineFn<Instant> {
+    private final TimestampCombiner timestampCombiner;
+
+    public WatermarkCombineFn(TimestampCombiner timestampCombiner) {
+      this.timestampCombiner = timestampCombiner;
+    }
+
+    @Override
+    public Instant apply(Instant left, Instant right) {
+      return timestampCombiner.combine(left, right);
+    }
+  };
+
   /**
    * JStorm implementation of {@link ValueState}.
    */
@@ -623,7 +632,7 @@ class JStormStateInternals<K> implements StateInternals {
 
     @Override
     public ReadableState<V> get(K var1) {
-      ReadableState<V> ret = new MapReadableState<>(null);
+      ReadableState<V> ret = null;
       try {
         ret = new MapReadableState(kvStore.get(var1));
       } catch (IOException e) {
@@ -634,7 +643,7 @@ class JStormStateInternals<K> implements StateInternals {
 
     @Override
     public ReadableState<Iterable<K>> keys() {
-      ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
+      ReadableState<Iterable<K>> ret = null;
       try {
         ret = new MapReadableState<>(kvStore.keys());
       } catch (IOException e) {
@@ -645,7 +654,7 @@ class JStormStateInternals<K> implements StateInternals {
 
     @Override
     public ReadableState<Iterable<V>> values() {
-      ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
+      ReadableState<Iterable<V>> ret = null;
       try {
         ret = new MapReadableState<>(kvStore.values());
       } catch (IOException e) {
@@ -656,7 +665,7 @@ class JStormStateInternals<K> implements StateInternals {
 
     @Override
     public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
-      ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
+      ReadableState<Iterable<Map.Entry<K, V>>> ret = null;
       try {
         ret = new MapReadableState<>(kvStore.entries());
       } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
index 5b60b03..e7f3285 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
@@ -63,10 +63,8 @@ class MetricsReporter {
   }
 
   private void updateCounters(Iterable<MetricResult<Long>> counters) {
-    System.out.print("updateCounters");
     for (MetricResult<Long> metricResult : counters) {
       String metricName = getMetricNameString(COUNTER_PREFIX, metricResult);
-      System.out.print("metricName: " + metricName);
       Long updateValue = metricResult.attempted();
       Long oldValue = reportedCounters.get(metricName);
 

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
index 138a5dc..f318a89 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
@@ -32,26 +32,10 @@ import org.slf4j.LoggerFactory;
 
 /**
  * JStorm {@link Executor} for {@link DoFn} with multi-output.
- * @param <InputT>
- * @param <OutputT>
  */
 class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
   private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
 
-  /**
-   * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated
-   * tag is used in downstream consumer. So before output, we need to map this "local" tag to
-   * "external" tag. See PCollectionTuple for details.
-   */
-  public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
-    @Override
-    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      executorsBolt.processExecutorElem(tag, output);
-    }
-  }
-
-  protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
-
   public MultiOutputDoFnExecutor(
       String stepName,
       String description,
@@ -63,13 +47,9 @@ class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, Outp
       Collection<PCollectionView<?>> sideInputs,
       Map<TupleTag, PCollectionView<?>> sideInputTagToView,
       TupleTag<OutputT> mainTupleTag,
-      List<TupleTag<?>> sideOutputTags,
-      Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
+      List<TupleTag<?>> sideOutputTags
   ) {
     super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
         sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-    this.localTupleTagMap = localTupleTagMap;
-    this.outputManager = new MultiOutputDoFnExecutorOutputManager();
-    LOG.info("localTupleTagMap: {}", localTupleTagMap);
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
index a3ffc30..44c0765 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiStatefulDoFnExecutor.java
@@ -32,7 +32,6 @@ import org.apache.beam.sdk.values.WindowingStrategy;
 
 /**
  * JStorm {@link Executor} for stateful {@link DoFn} with multi-output.
- * @param <OutputT>
  */
 class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
 
@@ -42,9 +41,9 @@ class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, Out
       Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
       TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
       Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
-      List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
+      List<TupleTag<?>> sideOutputTags) {
     super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
-        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
+        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
index 7daa1cb..986af43 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundMultiTranslator.java
@@ -20,7 +20,6 @@ package org.apache.beam.runners.jstorm.translation;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.sdk.coders.Coder;
@@ -33,7 +32,6 @@ import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.PValueBase;
 import org.apache.beam.sdk.values.TupleTag;
 
 /**
@@ -50,12 +48,6 @@ class ParDoBoundMultiTranslator<InputT, OutputT>
     PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
 
     Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
-    Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
-    for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
-      Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
-      localToExternalTupleTagMap.put(entry.getKey(), itr.next());
-    }
-
     TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
     List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
     sideOutputTags.remove(mainOutputTag);
@@ -90,8 +82,7 @@ class ParDoBoundMultiTranslator<InputT, OutputT>
           transform.getSideInputs(),
           sideInputTagToView.build(),
           mainOutputTag,
-          sideOutputTags,
-          localToExternalTupleTagMap);
+          sideOutputTags);
     } else {
       executor = new MultiOutputDoFnExecutor<>(
           userGraphContext.getStepName(),
@@ -105,8 +96,7 @@ class ParDoBoundMultiTranslator<InputT, OutputT>
           transform.getSideInputs(),
           sideInputTagToView.build(),
           mainOutputTag,
-          sideOutputTags,
-          localToExternalTupleTagMap);
+          sideOutputTags);
     }
 
     context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
deleted file mode 100644
index e6d09c4..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TupleTag;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Translates a ParDo.Bound to a JStorm {@link DoFnExecutor}.
- */
-class ParDoBoundTranslator<InputT, OutputT>
-    extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
-
-  private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
-
-  @Override
-  public void translateNode(
-      ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
-    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    final TupleTag<?> inputTag = userGraphContext.getInputTag();
-    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
-
-    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-    List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
-
-    Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-    String description = describeTransform(
-        transform,
-        allInputs,
-        userGraphContext.getOutputs());
-
-    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-
-    DoFnExecutor executor;
-    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-    if (signature.stateDeclarations().size() > 0
-        || signature.timerDeclarations().size() > 0) {
-      executor = new StatefulDoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          (DoFn<KV, OutputT>) transform.getFn(),
-          (Coder) WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          (TupleTag<KV>) inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags);
-    } else {
-      executor = new DoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          transform.getFn(),
-          WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          (TupleTag<InputT>) inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags);
-    }
-
-    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
index 911f259..70e2570 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StatefulDoFnExecutor.java
@@ -32,7 +32,6 @@ import org.apache.beam.sdk.values.WindowingStrategy;
 
 /**
  * JStorm {@link Executor} for stateful {@link DoFn}.
- * @param <OutputT>
  */
 class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
   public StatefulDoFnExecutor(

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
index 24a9050..159fe70 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
@@ -30,7 +30,7 @@ interface TimerService extends Serializable {
   void init(List<Integer> upStreamTasks);
 
   /**
-   *
+   * Update watermark when receiving watermark from a upstream task.
    * @param task
    * @param inputWatermark
    * @return new watermark if any timer is triggered during the update of watermark, otherwise 0

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
index 6b463db..027fc14 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
@@ -39,15 +39,15 @@ import org.joda.time.Instant;
  * Default implementation of {@link TimerService}.
  */
 class TimerServiceImpl implements TimerService {
-  private transient ExecutorContext executorContext;
-  private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
+  private ExecutorContext executorContext;
+  private Map<Integer, DoFnExecutor> idToDoFnExecutor;
 
   private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark =
       new ConcurrentHashMap<>();
   private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
   private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
   private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
-  private final transient PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
+  private final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
       new PriorityQueue<>();
   private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
       timerDataToKeyedExecutors = Maps.newHashMap();
@@ -132,7 +132,7 @@ class TimerServiceImpl implements TimerService {
     if (currentHold == null) {
       namespaceToWatermarkHold.put(namespace, watermarkHold);
       watermarkHolds.add(watermarkHold);
-    } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
+    } else if (watermarkHold.isBefore(currentHold)) {
       namespaceToWatermarkHold.put(namespace, watermarkHold);
       watermarkHolds.add(watermarkHold);
       watermarkHolds.remove(currentHold);

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
index 4d431d3..f0b8f74 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
@@ -38,8 +38,8 @@ interface TransformTranslator<T extends PTransform<?, ?>> {
   boolean canTranslate(T transform, TranslationContext context);
 
     /**
-     * Default translator.
-     * @param <T1>
+     * Default translator does NOT translate anything, but just generate
+     * the description of PTransform.
      */
   class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
     @Override
@@ -61,7 +61,11 @@ interface TransformTranslator<T extends PTransform<?, ?>> {
               .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
                 @Override
                 public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
-                  return taggedPValue.getKey().getId();
+                  if (taggedPValue != null) {
+                    return taggedPValue.getKey().getId();
+                  } else {
+                    return null;
+                  }
                 }
               })),
           transform.getName(),
@@ -69,7 +73,11 @@ interface TransformTranslator<T extends PTransform<?, ?>> {
               .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
                 @Override
                 public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
-                  return taggedPvalue.getKey().getId();
+                  if (taggedPvalue != null) {
+                    return taggedPvalue.getKey().getId();
+                  } else {
+                    return null;
+                  }
                 }
               })));
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 0991448..4407f15 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -198,8 +198,6 @@ public class TranslationContext {
       }
       bolt.addExecutor(tag, executor, userGraphContext.getStepName());
 
-      // filter all connections inside bolt
-      //if (!bolt.getOutputTags().contains(tag)) {
       Stream.Grouping grouping;
       if (isGBK) {
         grouping = Stream.Grouping.byFields(Arrays.asList(CommonInstance.KEY));
@@ -207,7 +205,6 @@ public class TranslationContext {
         grouping = Stream.Grouping.of(Stream.Grouping.Type.LOCAL_OR_SHUFFLE);
       }
       addStormStreamDef(TaggedPValue.of(tag, value), name, grouping);
-      //}
     }
 
     for (PValue sideInput : sideInputs) {
@@ -223,7 +220,7 @@ public class TranslationContext {
     // set parallelismNumber
     String pTransformfullName = userGraphContext.currentTransform.getFullName();
     String compositeName = pTransformfullName.split("/")[0];
-    Map parallelismNumMap = userGraphContext.getOptions().getParallelismNumMap();
+    Map parallelismNumMap = userGraphContext.getOptions().getParallelismMap();
     if (parallelismNumMap.containsKey(compositeName)) {
       int configNum = (Integer) parallelismNumMap.get(compositeName);
       int currNum = bolt.getParallelismNum();
@@ -262,10 +259,21 @@ public class TranslationContext {
       return (T) currentTransform.getInputs().values().iterator().next();
     }
 
-    public Map<TupleTag<?>, PValue> getInputs() {
+    public Map<TupleTag<?>, PValue> getTransformInputs() {
       return currentTransform.getInputs();
     }
 
+    /**
+     * Get input PValues with the output tags of upstream node.
+     */
+    public Map<TupleTag<?>, PValue> getInputs() {
+      Map<TupleTag<?>, PValue> ret = Maps.newHashMap();
+      for (PValue pValue : currentTransform.getInputs().values()) {
+        ret.put(findTupleTag(pValue), pValue);
+      }
+      return ret;
+    }
+
     public TupleTag<?> getInputTag() {
       return pValueToTupleTag.get(this.getInput());
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
index 9eaa13a..c8ea545 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
@@ -40,7 +40,6 @@ class TranslatorRegistry {
   static {
     TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
     TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
-    TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
     TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
     TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
     TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
index 4ae28e6..627a834 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
@@ -30,6 +30,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.sdk.io.UnboundedSource;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -61,7 +62,7 @@ public class UnboundedSourceSpout extends AbstractComponent implements IRichSpou
 
   private KryoSerializer<WindowedValue> serializer;
 
-  private long lastWaterMark = 0L;
+  private long lastWaterMark = BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
 
   public UnboundedSourceSpout(
       String name,
@@ -113,7 +114,7 @@ public class UnboundedSourceSpout extends AbstractComponent implements IRichSpou
   }
 
   @Override
-  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+  public synchronized void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
     try {
       this.collector = collector;
       this.pipelineOptions =
@@ -127,7 +128,8 @@ public class UnboundedSourceSpout extends AbstractComponent implements IRichSpou
     }
   }
 
-  public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
+  public synchronized void createSourceReader(UnboundedSource.CheckpointMark checkpointMark)
+      throws IOException {
     if (reader != null) {
       reader.close();
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
index 9ab5784..de3f568 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
@@ -256,9 +256,7 @@ class ViewTranslator
   /**
    * Specialized expansion for
    * {@link org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView}.
-   * @param <InputT>
-   * @param <OutputT>
-     */
+   */
   public static class CombineGloballyAsSingletonView<InputT, OutputT>
       extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
     Combine.GloballyAsSingletonView<InputT, OutputT> transform;

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
index 8d60392..832c95c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
@@ -31,8 +31,6 @@ import org.slf4j.LoggerFactory;
 
 /**
  * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.windowing.Window.Assign}.
- * @param <T>
- * @param <W>
  */
 class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
   private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/90ed2ef3/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
index b2ca267..3acf662 100644
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
@@ -64,7 +64,7 @@ public class JStormStateInternalsTest {
     IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
         Maps.newHashMap(),
         "test",
-        tmp.toString(),
+        tmp.getRoot().toString(),
         new KryoSerializer(Maps.newHashMap()));
     jstormStateInternals = new JStormStateInternals(
         "key-1", kvStoreManager, new TimerServiceImpl(), 0);


[03/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
new file mode 100644
index 0000000..f101beb
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import com.alibaba.jstorm.cache.IKvStore;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class JStormMapState<K, V> implements MapState<K, V> {
+    private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
+
+    private final K key;
+    private final StateNamespace namespace;
+    private IKvStore<K, V> kvStore;
+
+    public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
+        this.key = key;
+        this.namespace = namespace;
+        this.kvStore = kvStore;
+    }
+
+    @Override
+    public void put(K var1, V var2) {
+        try {
+            kvStore.put(var1, var2);
+        } catch (IOException e) {
+            reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
+        }
+    }
+
+    @Override
+    public ReadableState<V> putIfAbsent(K var1, V var2) {
+        ReadableState<V> ret = null;
+        try {
+            V value = kvStore.get(var1);
+            if (value == null) {
+                kvStore.put(var1, var2);
+                ret = new MapReadableState<>(null);
+            } else {
+                ret = new MapReadableState<>(value);
+            }
+        } catch (IOException e) {
+            reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
+        }
+        return ret;
+    }
+
+    @Override
+    public void remove(K var1) {
+        try {
+            kvStore.remove(var1);
+        } catch (IOException e) {
+            reportError(String.format("Failed to remove key=%s", var1), e);
+        }
+    }
+
+    @Override
+    public ReadableState<V> get(K var1) {
+        ReadableState<V> ret = new MapReadableState<>(null);
+        try {
+            ret = new MapReadableState(kvStore.get(var1));
+        } catch (IOException e) {
+            reportError(String.format("Failed to get value for key=%s", var1), e);
+        }
+        return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<K>> keys() {
+        ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
+        try {
+            ret = new MapReadableState<>(kvStore.keys());
+        } catch (IOException e) {
+            reportError(String.format("Failed to get keys"), e);
+        }
+        return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<V>> values() {
+        ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
+        try {
+            ret = new MapReadableState<>(kvStore.values());
+        } catch (IOException e) {
+            reportError(String.format("Failed to get values"), e);
+        }
+        return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
+        ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
+        try {
+            ret = new MapReadableState<>(kvStore.entries());
+        } catch (IOException e) {
+            reportError(String.format("Failed to get values"), e);
+        }
+        return ret;
+    }
+
+    @Override
+    public void clear() {
+        try {
+            Iterable<K> keys = kvStore.keys();
+            kvStore.removeBatch(keys);
+        } catch (IOException e) {
+            reportError(String.format("Failed to clear map state"), e);
+        }
+    }
+
+    private void reportError(String errorInfo, IOException e) {
+        LOG.error(errorInfo, e);
+        throw new RuntimeException(errorInfo);
+    }
+
+    private class MapReadableState<T> implements ReadableState<T> {
+        private T value;
+
+        public MapReadableState(T value) {
+            this.value = value;
+        }
+
+        @Override
+        public T read() {
+            return value;
+        }
+
+        @Override
+        public ReadableState<T> readLater() {
+            return this;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
new file mode 100644
index 0000000..8a0cb73
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateTag;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.SetState;
+import org.apache.beam.sdk.state.State;
+import org.apache.beam.sdk.state.StateBinder;
+import org.apache.beam.sdk.state.StateContext;
+import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn;
+import org.apache.beam.sdk.transforms.CombineWithContext;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * JStorm implementation of {@link StateInternals}.
+ *
+ * <p>Each state cell is backed by a key-value store obtained from the supplied
+ * {@code IKvStoreManager}; store ids are scoped per executor via {@link #getStoreId}.
+ */
+public class JStormStateInternals<K> implements StateInternals {
+
+    // Prefix for the auxiliary store holding bookkeeping data for bag-backed states.
+    private static final String STATE_INFO = "state-info:";
+
+    @Nullable
+    private final K key;
+    private final IKvStoreManager kvStoreManager;
+    private final TimerService timerService;
+    private final int executorId;
+
+    public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
+                                TimerService timerService, int executorId) {
+        this.key = key;
+        this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
+        this.timerService = checkNotNull(timerService, "timerService");
+        this.executorId = executorId;
+    }
+
+    @Nullable
+    @Override
+    public K getKey() {
+        return key;
+    }
+
+    @Override
+    public <T extends State> T state(
+        StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
+        /**
+         * TODO:
+         * Same implementation as state() which is without StateContext. This might be updated after
+         * we figure out if we really need StateContext for JStorm state internals.
+         */
+        return state(namespace, address);
+    }
+
+    @Override
+    public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
+        return address.getSpec().bind(address.getId(), new StateBinder() {
+            @Override
+            public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
+                try {
+                    return new JStormValueState<>(
+                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
+                } catch (IOException e) {
+                    // Chain the cause so store failures remain diagnosable.
+                    throw new RuntimeException(e);
+                }
+            }
+
+            @Override
+            public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
+                try {
+                    return new JStormBagState(
+                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
+                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            @Override
+            public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
+                String id,
+                StateSpec<MapState<KeyT, ValueT>> spec,
+                Coder<KeyT> mapKeyCoder,
+                Coder<ValueT> mapValueCoder) {
+                try {
+                    return new JStormMapState<>(getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            @Override
+            public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT> bindCombining(
+                    String id,
+                    StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
+                    Coder<AccumT> accumCoder,
+                    Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+                try {
+                    // Combining state is implemented on top of a bag of accumulators.
+                    BagState<AccumT> accumBagState = new JStormBagState(
+                            getKey(), namespace,
+                            kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
+                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+                    return new JStormCombiningState<>(accumBagState, combineFn);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+
+            @Override
+            public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
+            bindCombiningWithContext(
+                String id,
+                StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
+                CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public WatermarkHoldState bindWatermark(
+                String id,
+                StateSpec<WatermarkHoldState> spec,
+                final TimestampCombiner timestampCombiner) {
+                try {
+                    // Watermark holds are kept as a bag of instants combined via timestampCombiner.
+                    BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
+                            getKey(), namespace,
+                            kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
+                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+
+                    Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
+                            new BinaryCombineFn<Instant>() {
+                                @Override
+                                public Instant apply(Instant left, Instant right) {
+                                  return timestampCombiner.combine(left, right);
+                                }};
+                    return new JStormWatermarkHoldState(
+                            namespace,
+                            new JStormCombiningState<>(
+                                    accumBagState,
+                                    outputTimeCombineFn),
+                            timestampCombiner,
+                            timerService);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        });
+    }
+
+    /** Scopes a state id to this executor so stores of different executors don't collide. */
+    private String getStoreId(String stateId) {
+        return String.format("%s-%s", stateId, executorId);
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
new file mode 100644
index 0000000..5ad3663
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStore;
+
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.ValueState;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+
+/**
+ * JStorm implementation of {@link ValueState}.
+ *
+ * <p>The value lives in the backing key-value store under a {@link ComposedKey} built
+ * from the state key and namespace. Store failures are rethrown as unchecked exceptions
+ * with the original {@link IOException} chained as the cause.
+ */
+public class JStormValueState<K, T> implements ValueState<T> {
+
+    @Nullable
+    private final K key;
+    private final StateNamespace namespace;
+    private final IKvStore<ComposedKey, T> kvState;
+
+    JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
+        this.key = key;
+        this.namespace = namespace;
+        this.kvState = kvState;
+    }
+
+    @Override
+    public void write(T t) {
+        try {
+            kvState.put(getComposedKey(), t);
+        } catch (IOException e) {
+            // Chain the cause so the underlying store failure is not lost.
+            throw new RuntimeException(String.format(
+                    "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t), e);
+        }
+    }
+
+    @Override
+    public T read() {
+        try {
+            return kvState.get(getComposedKey());
+        } catch (IOException e) {
+            throw new RuntimeException(String.format(
+                    "Failed to read key: %s, namespace: %s.", key, namespace), e);
+        }
+    }
+
+    @Override
+    public ValueState<T> readLater() {
+        // TODO: support prefetch.
+        return this;
+    }
+
+    @Override
+    public void clear() {
+        try {
+            kvState.remove(getComposedKey());
+        } catch (IOException e) {
+            throw new RuntimeException(String.format(
+                    "Failed to clear key: %s, namespace: %s.", key, namespace), e);
+        }
+    }
+
+    /** Storage key for this cell: the (state key, namespace) pair. */
+    private ComposedKey getComposedKey() {
+        return ComposedKey.of(key, namespace);
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
new file mode 100644
index 0000000..659d77c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.GroupingState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+
+/**
+ * JStorm implementation of {@link WatermarkHoldState}.
+ *
+ * <p>Each hold is recorded in two places: the {@link TimerService} (keyed by the
+ * namespace's string key) so the watermark is actually held back, and a backing
+ * {@link GroupingState} that combines held instants with the configured
+ * {@link TimestampCombiner}.
+ */
+public class JStormWatermarkHoldState implements WatermarkHoldState {
+
+    private final StateNamespace namespace;
+    private final GroupingState<Instant, Instant> watermarkHoldsState;
+    private final TimestampCombiner timestampCombiner;
+    private final TimerService timerService;
+
+    JStormWatermarkHoldState(
+            StateNamespace namespace,
+            GroupingState<Instant, Instant> watermarkHoldsState,
+            TimestampCombiner timestampCombiner,
+            TimerService timerService) {
+        this.namespace = checkNotNull(namespace, "namespace");
+        this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
+        this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
+        this.timerService = checkNotNull(timerService, "timerService");
+    }
+
+    @Override
+    public TimestampCombiner getTimestampCombiner() {
+        return timestampCombiner;
+    }
+
+    @Override
+    public void add(Instant instant) {
+        // Register the hold with the timer service first, then persist it in state.
+        timerService.addWatermarkHold(namespace.stringKey(), instant);
+        watermarkHoldsState.add(instant);
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+        return watermarkHoldsState.isEmpty();
+    }
+
+    @Override
+    public Instant read() {
+        // Returns the combined hold (per the TimestampCombiner) or null if none is set.
+        return watermarkHoldsState.read();
+    }
+
+    @Override
+    public WatermarkHoldState readLater() {
+        // TODO: support prefetch.
+        return this;
+    }
+
+    @Override
+    public void clear() {
+        // Release the hold in the timer service and clear the backing state.
+        timerService.clearWatermarkHold(namespace.stringKey());
+        watermarkHoldsState.clear();
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
new file mode 100644
index 0000000..4b5f83c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.timer;
+
+import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.joda.time.Instant;
+
+import javax.annotation.Nullable;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * JStorm implementation of {@link TimerInternals}.
+ *
+ * <p>Timers are registered with the shared {@link TimerService} together with the
+ * {@link DoFnExecutor} to call back when they fire. Timer deletion is not yet
+ * supported. Watermark times are read from the {@link TimerService}.
+ */
+public class JStormTimerInternals<K> implements TimerInternals {
+
+    // Key of the element this timer belongs to; may be null for non-keyed contexts.
+    private final K key;
+    // Executor invoked when a timer registered through this instance fires.
+    private final DoFnExecutor<?, ?> doFnExecutor;
+    private final TimerService timerService;
+
+
+    public JStormTimerInternals(@Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
+        this.key = key;
+        this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
+        this.timerService = checkNotNull(timerService, "timerService");
+    }
+
+    @Override
+    public void setTimer(StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
+        // Delegate to the TimerData overload; TimerData carries all scheduling information.
+        setTimer(TimerData.of(timerId, namespace, target, timeDomain));
+    }
+
+    @Override
+    @Deprecated
+    public void setTimer(TimerData timerData) {
+        timerService.setTimer(key, timerData, doFnExecutor);
+    }
+
+    @Override
+    public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
+        throw new UnsupportedOperationException(
+                "Canceling of a timer is not yet supported.");
+    }
+
+    @Override
+    @Deprecated
+    public void deleteTimer(StateNamespace namespace, String timerId) {
+        throw new UnsupportedOperationException(
+                "Canceling of a timer is not yet supported.");
+    }
+
+    @Override
+    @Deprecated
+    public void deleteTimer(TimerData timerData) {
+        throw new UnsupportedOperationException(
+                "Canceling of a timer is not yet supported.");
+    }
+
+    @Override
+    public Instant currentProcessingTime() {
+        return Instant.now();
+    }
+
+    @Override
+    @Nullable
+    public Instant currentSynchronizedProcessingTime() {
+        // Synchronized processing time is not tracked by this runner.
+        return null;
+    }
+
+    @Override
+    public Instant currentInputWatermarkTime() {
+        return new Instant(timerService.currentInputWatermark());
+    }
+
+    @Override
+    @Nullable
+    public Instant currentOutputWatermarkTime() {
+        return new Instant(timerService.currentOutputWatermark());
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
new file mode 100644
index 0000000..9651fc2
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
+
+import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
+import org.apache.beam.sdk.io.Read;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TaggedPValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Translates a {@link Read.Bounded} into a Storm spout by adapting the bounded source
+ * to the unbounded-source spout machinery.
+ *
+ * @param <T> element type produced by the source
+ */
+public class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
+
+    @Override
+    public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
+        final TranslationContext.UserGraphContext graphContext = context.getUserGraphContext();
+        final String description =
+                describeTransform(transform, graphContext.getInputs(), graphContext.getOutputs());
+
+        final TupleTag<?> outputTag = graphContext.getOutputTag();
+        final PValue outputValue = graphContext.getOutput();
+
+        // Wrap the bounded source so it can be driven like an unbounded one.
+        final UnboundedSourceSpout spout = new UnboundedSourceSpout(
+                description,
+                new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
+                graphContext.getOptions(),
+                outputTag);
+
+        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(outputTag, outputValue));
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
new file mode 100644
index 0000000..c4da58a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.sdk.transforms.Combine;
+
+/**
+ * Translator for {@link Combine.Globally}; currently a placeholder that relies entirely on
+ * the behavior inherited from {@link TransformTranslator.Default}.
+ */
+public class CombineGloballyTranslator<InputT, OutputT> extends TransformTranslator.Default<Combine.Globally<InputT, OutputT>> {
+    
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
new file mode 100644
index 0000000..99cbff7
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.sdk.transforms.Combine;
+
+/**
+ * Translator for {@link Combine.PerKey}; currently a placeholder that relies entirely on
+ * the behavior inherited from {@link TransformTranslator.Default}.
+ */
+public class CombinePerKeyTranslator<K, InputT, OutputT> extends TransformTranslator.Default<Combine.PerKey<K, InputT, OutputT>> {
+    
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
new file mode 100644
index 0000000..4558216
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import com.google.common.collect.Maps;
+import org.apache.beam.sdk.transforms.Flatten;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.FlattenExecutor;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+import java.util.Map;
+
+/**
+ * Translator that maps a {@link Flatten.PCollections} transform onto a
+ * {@link FlattenExecutor} in the execution graph.
+ */
+public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
+
+    @Override
+    public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
+        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+
+        // Since a new tag is created in PCollectionList, retrieve the real tag here.
+        Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
+        for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
+            // Inputs of Flatten.PCollections are PCollections by construction.
+            PCollection<V> pc = (PCollection<V>) entry.getValue();
+            inputs.putAll(pc.expand());
+        }
+        // Removed leftover System.out.println debug statements that polluted stdout.
+        String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
+        FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
+        context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
new file mode 100644
index 0000000..6b8297b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
+import com.google.common.collect.Lists;
+import org.apache.beam.sdk.transforms.GroupByKey;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Translator that maps a {@link GroupByKey} transform onto a {@link GroupByWindowExecutor}.
+ *
+ * <p>The protected fields capture the transform's shape during translation; they are
+ * populated in {@link #translateNode} and available to subclasses.
+ */
+public class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
+    // information of transform
+    protected PCollection<KV<K, V>> input;
+    protected PCollection<KV<K, Iterable<V>>> output;
+    protected List<TupleTag<?>> inputTags;
+    protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
+    protected List<TupleTag<?>> sideOutputTags;
+    protected List<PCollectionView<?>> sideInputs;
+    protected WindowingStrategy<?, ?> windowingStrategy;
+
+    @Override
+    public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
+        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+
+        // Unchecked casts: GroupByKey's input/output types are fixed by the Beam SDK.
+        input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+        output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
+
+        inputTags = userGraphContext.getInputTags();
+        mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
+        sideOutputTags = Lists.newArrayList();
+
+        // GroupByKey has no side inputs/outputs; windowing comes from the input collection.
+        sideInputs = Collections.<PCollectionView<?>>emptyList();
+        windowingStrategy = input.getWindowingStrategy();
+
+        GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
+                userGraphContext.getStepName(),
+                description,
+                context,
+                context.getUserGraphContext().getOptions(),
+                windowingStrategy,
+                mainOutputTag,
+                sideOutputTags);
+        context.addTransformExecutor(groupByWindowExecutor);
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
new file mode 100644
index 0000000..c487578
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import avro.shaded.com.google.common.collect.Maps;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.MultiOutputDoFnExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.*;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Translates a ParDo.BoundMulti to a Storm {@link DoFnExecutor}.
+ */
+public class ParDoBoundMultiTranslator<InputT, OutputT>
+        extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
+
+    /**
+     * Builds a {@link DoFnExecutor} for a multi-output ParDo -- the stateful
+     * variant when the DoFn declares state or timers, the plain multi-output
+     * variant otherwise -- and registers it along with its side inputs.
+     */
+    @Override
+    public void translateNode(ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
+        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
+        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+
+        // Map each local output tag to the tag under which downstream transforms
+        // observe the corresponding PValue.
+        // NOTE(review): Maps here is avro's shaded Guava (see imports); prefer the
+        // plain com.google.common.collect.Maps used by the rest of the runner -- confirm.
+        Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
+        Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
+        for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
+            // NOTE(review): assumes expand() yields at least one tag; itr.next()
+            // throws NoSuchElementException otherwise -- confirm upstream invariant.
+            Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
+            localToExternalTupleTagMap.put(entry.getKey(), itr.next());
+        }
+
+        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+        // NOTE(review): if getOutputTags() returns a live list, remove() mutates the
+        // context's state in place -- confirm that is intended (or copy first).
+        List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
+        sideOutputTags.remove(mainOutputTag);
+
+        // Include side inputs among the inputs when describing the transform.
+        Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
+        for (PCollectionView pCollectionView : transform.getSideInputs()) {
+            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+        }
+        String description = describeTransform(
+                transform,
+                allInputs,
+                allOutputs);
+
+        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+        for (PCollectionView pCollectionView : transform.getSideInputs()) {
+            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+        }
+
+        // State or timer declarations on the DoFn require the stateful executor.
+        DoFnExecutor executor;
+        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+        if (signature.stateDeclarations().size() > 0
+                || signature.timerDeclarations().size() > 0) {
+            executor = new MultiStatefulDoFnExecutor<>(
+                    userGraphContext.getStepName(),
+                    description,
+                    userGraphContext.getOptions(),
+                    (DoFn<KV, OutputT>) transform.getFn(),
+                    (Coder) WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+                    input.getWindowingStrategy(),
+                    (TupleTag<KV>) inputTag,
+                    transform.getSideInputs(),
+                    sideInputTagToView.build(),
+                    mainOutputTag,
+                    sideOutputTags,
+                    localToExternalTupleTagMap);
+        } else {
+            executor = new MultiOutputDoFnExecutor<>(
+                    userGraphContext.getStepName(),
+                    description,
+                    userGraphContext.getOptions(),
+                    transform.getFn(),
+                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+                    input.getWindowingStrategy(),
+                    inputTag,
+                    transform.getSideInputs(),
+                    sideInputTagToView.build(),
+                    mainOutputTag,
+                    sideOutputTags,
+                    localToExternalTupleTagMap);
+        }
+
+        // Side inputs are passed so the executor is wired after its views are ready.
+        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
new file mode 100644
index 0000000..3a952a9
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import java.util.List;
+import java.util.Map;
+
+import avro.shaded.com.google.common.collect.Lists;
+import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.*;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Translates a ParDo.Bound to a Storm {@link DoFnExecutor}.
+ */
+public class ParDoBoundTranslator<InputT, OutputT>
+        extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
+
+    // NOTE(review): LOG is not referenced anywhere in this class -- remove or use.
+    private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
+
+    /**
+     * Builds a {@link DoFnExecutor} for a single-output ParDo -- the stateful
+     * variant when the DoFn declares state or timers, the plain variant
+     * otherwise -- and registers it along with its side inputs.
+     */
+    @Override
+    public void translateNode(ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
+        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        final TupleTag<?> inputTag = userGraphContext.getInputTag();
+        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
+
+        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
+        // Single-output ParDo: no side outputs.
+        List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
+
+        // Include side inputs among the inputs when describing the transform.
+        // NOTE(review): fully-qualified avro-shaded Guava Maps -- prefer the plain
+        // com.google.common.collect.Maps used by the rest of the runner.
+        Map<TupleTag<?>, PValue> allInputs = avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
+        for (PCollectionView pCollectionView : transform.getSideInputs()) {
+            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+        }
+        String description = describeTransform(
+                transform,
+                allInputs,
+                userGraphContext.getOutputs());
+
+        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
+        for (PCollectionView pCollectionView : transform.getSideInputs()) {
+            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
+        }
+
+        // State or timer declarations on the DoFn require the stateful executor.
+        DoFnExecutor executor;
+        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
+        if (signature.stateDeclarations().size() > 0
+                || signature.timerDeclarations().size() > 0) {
+            executor = new StatefulDoFnExecutor<>(
+                    userGraphContext.getStepName(),
+                    description,
+                    userGraphContext.getOptions(),
+                    (DoFn<KV, OutputT>) transform.getFn(),
+                    (Coder) WindowedValue.getFullCoder(
+                            input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+                    input.getWindowingStrategy(),
+                    (TupleTag<KV>) inputTag,
+                    transform.getSideInputs(),
+                    sideInputTagToView.build(),
+                    mainOutputTag,
+                    sideOutputTags);
+        } else {
+            executor = new DoFnExecutor<>(
+                    userGraphContext.getStepName(),
+                    description,
+                    userGraphContext.getOptions(),
+                    transform.getFn(),
+                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
+                    input.getWindowingStrategy(),
+                    (TupleTag<InputT>) inputTag,
+                    transform.getSideInputs(),
+                    sideInputTagToView.build(),
+                    mainOutputTag,
+                    sideOutputTags);
+        }
+
+        // Side inputs are passed so the executor is wired after its views are ready.
+        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
new file mode 100644
index 0000000..1ef1ec3
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.sdk.transforms.Reshuffle;
+
+/**
+ * Translates a {@link Reshuffle} transform. Inherits the no-op translation from
+ * {@code TransformTranslator.Default}; no executor is registered for Reshuffle.
+ */
+public class ReshuffleTranslator<K, V> extends TransformTranslator.Default<Reshuffle<K,V>> {
+    
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
new file mode 100644
index 0000000..9f69391
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import com.google.auto.value.AutoValue;
+
+import javax.annotation.Nullable;
+import java.util.List;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Class that defines the stream connection between upstream and downstream components.
+ */
+@AutoValue
+public abstract class Stream {
+
+    public abstract Producer getProducer();
+    public abstract Consumer getConsumer();
+
+    public static Stream of(Producer producer, Consumer consumer) {
+        // The AutoValue-generated class lives in THIS package; the previous
+        // fully-qualified reference still pointed at the pre-rename
+        // com.alibaba.jstorm.beam.* package, which no longer exists.
+        return new AutoValue_Stream(producer, consumer);
+    }
+
+    /**
+     * The upstream endpoint of a stream: which component emits it and on which
+     * stream id/name.
+     */
+    @AutoValue
+    public abstract static class Producer {
+        public abstract String getComponentId();
+        public abstract String getStreamId();
+        public abstract String getStreamName();
+
+        public static Producer of(String componentId, String streamId, String streamName) {
+            return new AutoValue_Stream_Producer(
+                    componentId, streamId, streamName);
+        }
+    }
+
+    /**
+     * The downstream endpoint of a stream: which component consumes it and with
+     * which grouping.
+     */
+    @AutoValue
+    public abstract static class Consumer {
+        public abstract String getComponentId();
+        public abstract Grouping getGrouping();
+
+        public static Consumer of(String componentId, Grouping grouping) {
+            return new AutoValue_Stream_Consumer(
+                    componentId, grouping);
+        }
+    }
+
+    /**
+     * How tuples are routed from producer to consumer. Field grouping carries
+     * the list of key fields; all other types carry none.
+     */
+    @AutoValue
+    public abstract static class Grouping {
+        public abstract Type getType();
+
+        /** Key fields for {@link Type#FIELDS} grouping; null for every other type. */
+        @Nullable
+        public abstract List<String> getFields();
+
+        public static Grouping of(Type type) {
+            // FIELDS grouping must go through byFields() so key fields are supplied.
+            checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
+            return new AutoValue_Stream_Grouping(
+                    type, null /* fields */);
+        }
+
+        public static Grouping byFields(List<String> fields) {
+            checkNotNull(fields, "fields");
+            checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
+            return new AutoValue_Stream_Grouping(
+                    Type.FIELDS, fields);
+        }
+
+        /**
+         * Types of stream groupings Storm allows.
+         */
+        public enum Type {
+            ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
new file mode 100644
index 0000000..bebdf7b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.FluentIterable;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.sdk.transforms.PTransform;
+
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+import java.util.Map;
+
+/**
+ * Interface for classes capable of transforming Beam PTransforms into Storm primitives.
+ */
+public interface TransformTranslator<T extends PTransform<?, ?>> {
+
+    /** Translates the given transform into Storm primitives on the context. */
+    void translateNode(T transform, TranslationContext context);
+
+    /**
+     * Returns true if this translator can translate the given transform.
+     */
+    boolean canTranslate(T transform, TranslationContext context);
+
+    /**
+     * Base implementation: translates nothing and accepts every transform.
+     * Concrete translators extend this and override {@link #translateNode}.
+     */
+    class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
+        @Override
+        public void translateNode(T1 transform, TranslationContext context) {
+
+        }
+
+        @Override
+        public boolean canTranslate(T1 transform, TranslationContext context) {
+            return true;
+        }
+
+        /**
+         * Formats a transform as "inputTags --> name --> outputTags" for use in
+         * logs and executor descriptions.
+         */
+        static String describeTransform(
+                PTransform<?, ?> transform,
+                Map<TupleTag<?>, PValue> inputs,
+                Map<TupleTag<?>, PValue> outputs) {
+            return String.format("%s --> %s --> %s",
+                    Joiner.on('+').join(FluentIterable.from(inputs.entrySet())
+                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
+                                @Override
+                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
+                                    return taggedPValue.getKey().getId();
+                                    // return taggedPValue.getValue().getName();
+                                }})),
+                    transform.getName(),
+                    Joiner.on('+').join(FluentIterable.from(outputs.entrySet())
+                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
+                                @Override
+                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
+                                    return taggedPvalue.getKey().getId();
+                                    //return taggedPValue.getValue().getName();
+                                }})));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
new file mode 100644
index 0000000..ac7d7bd
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.sdk.io.Read;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TaggedPValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
+
+/**
+ * Translates a Read.Unbounded into a Storm spout.
+ * 
+ * @param <T>
+ */
+public class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbounded<T>> {
+
+    /**
+     * Registers an {@link UnboundedSourceSpout} wrapping the unbounded source, so
+     * the topology emits the source's elements under the transform's output tag.
+     */
+    @Override
+    public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
+        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+
+        TupleTag<?> tag = userGraphContext.getOutputTag();
+        PValue output = userGraphContext.getOutput();
+
+        UnboundedSourceSpout spout = new UnboundedSourceSpout(
+                description,
+                transform.getSource(), userGraphContext.getOptions(), tag);
+        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
new file mode 100644
index 0000000..0ebf837
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
@@ -0,0 +1,374 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.ViewExecutor;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.ListCoder;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PCollectionViews;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A {@link TransformTranslator} for executing {@link View Views} in JStorm runner.
+ */
+public class ViewTranslator extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
+    /**
+     * Registers a {@link ViewExecutor} that materializes the PCollection under
+     * the transform's output tag as a side-input view.
+     */
+    @Override
+    public void translateNode(CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
+        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+        ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
+        context.addTransformExecutor(viewExecutor);
+    }
+
+    /**
+     * Specialized implementation for
+     * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}
+     * for the JStorm runner in streaming mode.
+     */
+    public static class ViewAsMap<K, V>
+            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
+
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public ViewAsMap(View.AsMap<K, V> transform) {
+        }
+
+        @Override
+        public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
+            PCollectionView<Map<K, V>> view =
+                    PCollectionViews.mapView(
+                            input,
+                            input.getWindowingStrategy(),
+                            input.getCoder());
+
+            @SuppressWarnings({"rawtypes", "unchecked"})
+            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+            try {
+                inputCoder.getKeyCoder().verifyDeterministic();
+            } catch (Coder.NonDeterministicException e) {
+                // Non-deterministic key coders are tolerated here, unlike some runners.
+                // TODO: log warning as other runners.
+            }
+
+            // Concatenate all elements per window, then materialize as a map view.
+            return input
+                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingViewAsMap";
+        }
+    }
+
+    /**
+     * Specialized expansion for {@link
+     * View.AsMultimap View.AsMultimap} for the
+     * JStorm runner in streaming mode.
+     */
+    public static class ViewAsMultimap<K, V>
+            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
+
+        /**
+         * Builds an instance of this class from the overridden transform.
+         */
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public ViewAsMultimap(View.AsMultimap<K, V> transform) {
+        }
+
+        @Override
+        public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
+            PCollectionView<Map<K, Iterable<V>>> view =
+                    PCollectionViews.multimapView(
+                            input,
+                            input.getWindowingStrategy(),
+                            input.getCoder());
+
+            @SuppressWarnings({"rawtypes", "unchecked"})
+            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+            try {
+                inputCoder.getKeyCoder().verifyDeterministic();
+            } catch (Coder.NonDeterministicException e) {
+                // Non-deterministic key coders are tolerated here, unlike some runners.
+                // TODO: log warning as other runners.
+            }
+
+            // Concatenate all elements per window, then materialize as a multimap view.
+            return input
+                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingViewAsMultimap";
+        }
+    }
+
+    /**
+     * Specialized implementation for
+     * {@link View.AsList View.AsList} for the
+     * JStorm runner in streaming mode.
+     */
+    public static class ViewAsList<T>
+            extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
+        /**
+         * Builds an instance of this class from the overridden transform.
+         */
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public ViewAsList(View.AsList<T> transform) {}
+
+        @Override
+        public PCollectionView<List<T>> expand(PCollection<T> input) {
+            PCollectionView<List<T>> view =
+                    PCollectionViews.listView(
+                            input,
+                            input.getWindowingStrategy(),
+                            input.getCoder());
+
+            // Concatenate all elements per window, then materialize as a list view.
+            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+                    .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingViewAsList";
+        }
+    }
+
+    /**
+     * Specialized implementation for
+     * {@link View.AsIterable View.AsIterable} for the
+     * JStorm runner in streaming mode.
+     */
+    public static class ViewAsIterable<T>
+            extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
+        /**
+         * Builds an instance of this class from the overridden transform.
+         */
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public ViewAsIterable(View.AsIterable<T> transform) { }
+
+        @Override
+        public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
+            PCollectionView<Iterable<T>> view =
+                    PCollectionViews.iterableView(
+                            input,
+                            input.getWindowingStrategy(),
+                            input.getCoder());
+
+            // Concatenate all elements per window, then materialize as an iterable view.
+            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+                    .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingViewAsIterable";
+        }
+    }
+
+    /**
+     * Specialized expansion for
+     * {@link View.AsSingleton View.AsSingleton} for the
+     * JStorm runner in streaming mode.
+     */
+    public static class ViewAsSingleton<T>
+            extends PTransform<PCollection<T>, PCollectionView<T>> {
+        private View.AsSingleton<T> transform;
+
+        /**
+         * Builds an instance of this class from the overridden transform.
+         */
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public ViewAsSingleton(View.AsSingleton<T> transform) {
+            this.transform = transform;
+        }
+
+        @Override
+        public PCollectionView<T> expand(PCollection<T> input) {
+            Combine.Globally<T, T> combine = Combine.globally(
+                    new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
+            // Without a default value, an empty input must surface as an error rather than
+            // producing a synthetic default; withoutDefaults() disables default insertion.
+            if (!transform.hasDefaultValue()) {
+                combine = combine.withoutDefaults();
+            }
+            return input.apply(combine.asSingletonView());
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingViewAsSingleton";
+        }
+
+        /**
+         * Combine fn that enforces the singleton contract: combining two elements always
+         * fails (a singleton view must see at most one element per window), and the
+         * identity supplies the configured default value for empty windows, if any.
+         */
+        private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
+            private boolean hasDefaultValue;
+            private T defaultValue;
+
+            SingletonCombine(boolean hasDefaultValue, T defaultValue) {
+                this.hasDefaultValue = hasDefaultValue;
+                this.defaultValue = defaultValue;
+            }
+
+            @Override
+            public T apply(T left, T right) {
+                // Only reached when the window holds more than one element, which violates
+                // the singleton-view contract.
+                throw new IllegalArgumentException("PCollection with more than one element "
+                        + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
+                        + "combine the PCollection into a single value");
+            }
+
+            @Override
+            public T identity() {
+                if (hasDefaultValue) {
+                    return defaultValue;
+                } else {
+                    throw new IllegalArgumentException(
+                            "Empty PCollection accessed as a singleton view. "
+                                    + "Consider setting withDefault to provide a default value");
+                }
+            }
+        }
+    }
+
+    /**
+     * Specialized expansion for
+     * {@link Combine.GloballyAsSingletonView Combine.GloballyAsSingletonView} for the
+     * JStorm runner in streaming mode.
+     */
+    public static class CombineGloballyAsSingletonView<InputT, OutputT>
+            extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
+        Combine.GloballyAsSingletonView<InputT, OutputT> transform;
+
+        /**
+         * Builds an instance of this class from the overridden transform.
+         */
+        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+        public CombineGloballyAsSingletonView(
+                Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
+            this.transform = transform;
+        }
+
+        @Override
+        public PCollectionView<OutputT> expand(PCollection<InputT> input) {
+            PCollection<OutputT> combined =
+                    input.apply(Combine.globally(transform.getCombineFn())
+                            .withoutDefaults()
+                            .withFanout(transform.getFanout()));
+
+            PCollectionView<OutputT> view = PCollectionViews.singletonView(
+                    combined,
+                    combined.getWindowingStrategy(),
+                    transform.getInsertDefault(),
+                    transform.getInsertDefault()
+                            ? transform.getCombineFn().defaultValue() : null,
+                    combined.getCoder());
+            // Wrap each combined value in a singleton List so it matches the
+            // PCollection<List<T>> input expected by CreateJStormPCollectionView.
+            return combined
+                    .apply(ParDo.of(new WrapAsList<OutputT>()))
+                    .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
+        }
+
+        @Override
+        protected String getKindString() {
+            return "StreamingCombineGloballyAsSingletonView";
+        }
+    }
+
+    /**
+     * Wraps each element into a singleton {@link List}, matching the
+     * {@code PCollection<List<T>>} input type required by {@link CreateJStormPCollectionView}.
+     */
+    private static class WrapAsList<T> extends DoFn<T, List<T>> {
+        @ProcessElement
+        public void processElement(ProcessContext c) {
+            c.output(Collections.singletonList(c.element()));
+        }
+    }
+
+    /**
+     * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
+     *
+     * <p>Requires that the input {@link PCollection} fit in memory.
+     * For a large {@link PCollection} this is expected to crash!
+     *
+     * @param <T> the type of elements to concatenate.
+     */
+    private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public List<T> createAccumulator() {
+            return new ArrayList<>();
+        }
+
+        @Override
+        public List<T> addInput(List<T> accumulator, T input) {
+            accumulator.add(input);
+            return accumulator;
+        }
+
+        @Override
+        public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
+            // Flattens all partial lists into a single fresh accumulator.
+            List<T> result = createAccumulator();
+            for (List<T> accumulator : accumulators) {
+                result.addAll(accumulator);
+            }
+            return result;
+        }
+
+        @Override
+        public List<T> extractOutput(List<T> accumulator) {
+            return accumulator;
+        }
+
+        @Override
+        public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
+            return ListCoder.of(inputCoder);
+        }
+
+        @Override
+        public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
+            return ListCoder.of(inputCoder);
+        }
+    }
+
+    /**
+     * Creates a primitive {@link PCollectionView}.
+     *
+     * <p>For internal use only by runner implementors.
+     *
+     * @param <ElemT> The type of the elements of the input PCollection
+     * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
+     */
+    public static class CreateJStormPCollectionView<ElemT, ViewT>
+            extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
+        private PCollectionView<ViewT> view;
+
+        private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
+            this.view = view;
+        }
+
+        /** Returns a transform producing the given, already-constructed view. */
+        public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
+                PCollectionView<ViewT> view) {
+            return new CreateJStormPCollectionView<>(view);
+        }
+
+        @Override
+        public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
+            // The view was fully constructed by the caller; the input PCollection is not
+            // inspected here.
+            return view;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
new file mode 100644
index 0000000..0bf9a49
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.translation.runtime.WindowAssignExecutor;
+import org.apache.beam.sdk.transforms.windowing.Window;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+
+/**
+ * Translates a {@link Window.Assign} transform into a JStorm {@link WindowAssignExecutor}.
+ *
+ * @param <T> the type of the elements being windowed
+ */
+public class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
+
+    @Override
+    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
+        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+        // Mark the user graph as windowed before wiring in the executor.
+        context.getUserGraphContext().setWindowed();
+        WindowAssignExecutor executor = new WindowAssignExecutor(
+                description,
+                transform.getWindowFn(),
+                userGraphContext.getOutputTag());
+        context.addTransformExecutor(executor);
+    }
+}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
new file mode 100644
index 0000000..b67aff9
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.translator;
+
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.sdk.transforms.windowing.FixedWindows;
+import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
+import org.apache.beam.sdk.transforms.windowing.Window;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Translates a Window.Bound node into a Storm WindowedBolt.
+ *
+ * @param <T> the type of the elements being windowed
+ */
+public class WindowBoundTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
+    private static final Logger LOG = LoggerFactory.getLogger(WindowBoundTranslator.class);
+
+    // Do nothing here currently. The assign of window strategy is included in AssignTranslator.
+    @Override
+    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
+        // Both supported window fns take the same action: simply mark the user graph as
+        // windowed. Any other WindowFn is rejected.
+        if (transform.getWindowFn() instanceof FixedWindows) {
+            context.getUserGraphContext().setWindowed();
+        } else if (transform.getWindowFn() instanceof SlidingWindows) {
+            context.getUserGraphContext().setWindowed();
+        } else {
+            throw new UnsupportedOperationException("Not supported window type currently: " + transform.getWindowFn());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
new file mode 100644
index 0000000..07a3ad5
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.util;
+
+/**
+ * Shared constants for the JStorm translation layer.
+ */
+public class CommonInstance {
+    // Field names for emitted key/value pairs.
+    // NOTE(review): confirm against the declareOutputFields usage elsewhere in the runner.
+    public static final String KEY = "Key";
+    public static final String VALUE = "Value";
+
+    // Stream id presumably dedicated to propagating Beam watermarks between components —
+    // TODO confirm against the watermark-emitting bolt/spout.
+    public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
new file mode 100644
index 0000000..87562fd
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.util;
+
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.runners.core.SideInputReader;
+import org.apache.beam.sdk.values.PCollectionView;
+
+import javax.annotation.Nullable;
+import java.io.Serializable;
+
+/**
+ * No-op SideInputReader implementation.
+ */
+public class DefaultSideInputReader implements SideInputReader, Serializable {
+    // Always reports "no value available" for any view/window pair.
+    @Nullable
+    @Override
+    public <T> T get(PCollectionView<T> pCollectionView, BoundedWindow boundedWindow) {
+        return null;
+    }
+
+    // No view is ever contained in this reader.
+    @Override
+    public <T> boolean contains(PCollectionView<T> pCollectionView) {
+        return false;
+    }
+
+    // This reader holds no side inputs at all.
+    @Override
+    public boolean isEmpty() {
+        return true;
+    }
+}


[43/53] [abbrv] beam git commit: ReshuffleTest: replace Iterable equal tests with matchers.

Posted by pe...@apache.org.
ReshuffleTest: replace Iterable equal tests with matchers.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/e182cf75
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/e182cf75
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/e182cf75

Branch: refs/heads/jstorm-runner
Commit: e182cf75eb3d5a4701a98a2f5687cf0ea9d51774
Parents: 1178f9f
Author: Pei He <pe...@apache.org>
Authored: Wed Jul 19 20:38:49 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 .../beam/sdk/transforms/ReshuffleTest.java      | 22 ++++++++++++++------
 1 file changed, 16 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/e182cf75/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ReshuffleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ReshuffleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ReshuffleTest.java
index 3cd7cf9..0eb8e2d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ReshuffleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ReshuffleTest.java
@@ -17,7 +17,10 @@
  */
 package org.apache.beam.sdk.transforms;
 
+import static org.apache.beam.sdk.TestUtils.KvMatcher.isKv;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 
@@ -70,9 +73,16 @@ public class ReshuffleTest implements Serializable {
         KV.of("k1", 3),
         KV.of("k2", 4));
 
-  private static final List<KV<String, Iterable<Integer>>> GROUPED_TESTABLE_KVS = ImmutableList.of(
-        KV.of("k1", (Iterable<Integer>) ImmutableList.of(3)),
-        KV.of("k2", (Iterable<Integer>) ImmutableList.of(4)));
+  private static class AssertThatHasExpectedContents
+      implements SerializableFunction<Iterable<KV<String, Iterable<Integer>>>, Void> {
+    @Override
+    public Void apply(Iterable<KV<String, Iterable<Integer>>> actual) {
+      assertThat(actual, containsInAnyOrder(
+          isKv(is("k1"), containsInAnyOrder(3)),
+          isKv(is("k2"), containsInAnyOrder(4))));
+      return null;
+    }
+  }
 
   @Rule
   public final transient TestPipeline pipeline = TestPipeline.create();
@@ -167,7 +177,7 @@ public class ReshuffleTest implements Serializable {
     PCollection<KV<String, Iterable<Integer>>> output = input
         .apply(Reshuffle.<String, Iterable<Integer>>of());
 
-    PAssert.that(output).containsInAnyOrder(GROUPED_TESTABLE_KVS);
+    PAssert.that(output).satisfies(new AssertThatHasExpectedContents());
 
     assertEquals(
         input.getWindowingStrategy(),
@@ -190,7 +200,7 @@ public class ReshuffleTest implements Serializable {
     PCollection<KV<String, Iterable<Integer>>> output = input
         .apply(Reshuffle.<String, Iterable<Integer>>of());
 
-    PAssert.that(output).containsInAnyOrder(GROUPED_TESTABLE_KVS);
+    PAssert.that(output).satisfies(new AssertThatHasExpectedContents());
 
     assertEquals(
         input.getWindowingStrategy(),
@@ -213,7 +223,7 @@ public class ReshuffleTest implements Serializable {
     PCollection<KV<String, Iterable<Integer>>> output = input
         .apply(Reshuffle.<String, Iterable<Integer>>of());
 
-    PAssert.that(output).containsInAnyOrder(GROUPED_TESTABLE_KVS);
+    PAssert.that(output).satisfies(new AssertThatHasExpectedContents());
 
     assertEquals(
         input.getWindowingStrategy(),


[35/53] [abbrv] beam git commit: jstorm-runner: 1. Add kryo serializer for Collections.SingletonLists 2. Fix concurrent problem of elementIndex of JStormBagState

Posted by pe...@apache.org.
jstorm-runner:
1. Add kryo serializer for Collections.SingletonLists
2. Fix concurrent problem of elementIndex of JStormBagState


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/201ef722
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/201ef722
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/201ef722

Branch: refs/heads/jstorm-runner
Commit: 201ef722ec36b0ffa8197722fdf898fb9978803c
Parents: 1bf3224
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Jul 19 20:15:56 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       |  2 +
 .../serialization/CollectionsSerializer.java    | 43 +++++++++++++++++++
 .../jstorm/translation/ExecutorsBolt.java       |  2 +-
 .../translation/JStormStateInternals.java       | 44 +++++++++++++-------
 4 files changed, 75 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/201ef722/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 286a975..56db1c6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -31,6 +31,7 @@ import com.alibaba.jstorm.cluster.StormConfig;
 import com.alibaba.jstorm.transactional.TransactionTopologyBuilder;
 import java.util.HashMap;
 import java.util.Map;
+import org.apache.beam.runners.jstorm.serialization.CollectionsSerializer;
 import org.apache.beam.runners.jstorm.serialization.ImmutableListSerializer;
 import org.apache.beam.runners.jstorm.serialization.ImmutableMapSerializer;
 import org.apache.beam.runners.jstorm.serialization.ImmutableSetSerializer;
@@ -105,6 +106,7 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
     SdkRepackImmuSetSerializer.registerSerializers(config);
     ImmutableMapSerializer.registerSerializers(config);
     SdkRepackImmutableMapSerializer.registerSerializers(config);
+    CollectionsSerializer.registerSerializers(config);
 
     config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
     return config;

http://git-wip-us.apache.org/repos/asf/beam/blob/201ef722/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
new file mode 100644
index 0000000..0548196
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
@@ -0,0 +1,43 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+
+import java.util.Collections;
+import java.util.List;
+
+
+/**
+ * Specific serializer of {@link Kryo} for Collections.
+ */
+public class CollectionsSerializer {
+
+  /**
+   * Specific {@link Kryo} serializer for {@link java.util.Collections.SingletonList}.
+   */
+  public static class CollectionsSingletonListSerializer extends Serializer<List<?>> {
+    public CollectionsSingletonListSerializer() {
+      setImmutable(true);
+    }
+
+    @Override
+    public List<?> read(final Kryo kryo, final Input input, final Class<List<?>> type) {
+      final Object obj = kryo.readClassAndObject(input);
+      return Collections.singletonList(obj);
+    }
+
+    @Override
+    public void write(final Kryo kryo, final Output output, final List<?> list) {
+      kryo.writeClassAndObject(output, list.get(0));
+    }
+
+  }
+
+  public static void registerSerializers(Config config) {
+    config.registerSerialization(Collections.singletonList("").getClass(),
+            CollectionsSingletonListSerializer.class);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/201ef722/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
index ce6ea2c..33393f2 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
@@ -240,7 +240,7 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
   }
 
   public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
-    LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
+    LOG.debug("ProcessExecutorElem: value={} from tag={}", elem.getValue(), inputTag);
     if (elem != null) {
       Executor executor = inputTagToExecutor.get(inputTag);
       if (executor != null) {

http://git-wip-us.apache.org/repos/asf/beam/blob/201ef722/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index 3b6b4d5..68a17e5 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -261,7 +261,6 @@ class JStormStateInternals<K> implements StateInternals {
     private final StateNamespace namespace;
     private final IKvStore<ComposedKey, T> kvState;
     private final IKvStore<ComposedKey, Object> stateInfoKvState;
-    private int elemIndex;
 
     JStormBagState(
         @Nullable K key,
@@ -272,17 +271,19 @@ class JStormStateInternals<K> implements StateInternals {
       this.namespace = checkNotNull(namespace, "namespace");
       this.kvState = checkNotNull(kvState, "kvState");
       this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
+    }
 
-      Integer index = (Integer) stateInfoKvState.get(getComposedKey());
-      this.elemIndex = index != null ? ++index : 0;
+    private int getElementIndex() throws IOException {
+      Integer elementIndex = (Integer) stateInfoKvState.get(getComposedKey());
+      return elementIndex != null ? elementIndex : 0;
     }
 
     @Override
     public void add(T input) {
       try {
+        int elemIndex = getElementIndex();
         kvState.put(getComposedKey(elemIndex), input);
-        stateInfoKvState.put(getComposedKey(), elemIndex);
-        elemIndex++;
+        stateInfoKvState.put(getComposedKey(), ++elemIndex);
       } catch (IOException e) {
         throw new RuntimeException(e.getCause());
       }
@@ -293,7 +294,12 @@ class JStormStateInternals<K> implements StateInternals {
       return new ReadableState<Boolean>() {
         @Override
         public Boolean read() {
-          return elemIndex <= 0;
+          try {
+            return getElementIndex() <= 0;
+          } catch (IOException e) {
+            LOG.error("Failed to read", e);
+            return false;
+          }
         }
 
         @Override
@@ -306,7 +312,7 @@ class JStormStateInternals<K> implements StateInternals {
 
     @Override
     public Iterable<T> read() {
-      return new BagStateIterable(elemIndex);
+      return new BagStateIterable();
     }
 
     @Override
@@ -318,11 +324,11 @@ class JStormStateInternals<K> implements StateInternals {
     @Override
     public void clear() {
       try {
+        int elemIndex = getElementIndex();
         for (int i = 0; i < elemIndex; i++) {
           kvState.remove(getComposedKey(i));
         }
         stateInfoKvState.remove(getComposedKey());
-        elemIndex = 0;
       } catch (IOException e) {
         throw new RuntimeException(e.getCause());
       }
@@ -336,6 +342,18 @@ class JStormStateInternals<K> implements StateInternals {
       return ComposedKey.of(key, namespace, elemIndex);
     }
 
+    @Override
+    public String toString() {
+      int elemIndex = -1;
+      try {
+        elemIndex = getElementIndex();
+      } catch (IOException e) {
+
+      }
+      return String.format("JStormBagState: key=%s, namespace=%s, elementIndex=%d",
+              key, namespace, elemIndex);
+    }
+
     /**
      * Implementation of Bag state Iterable.
      */
@@ -346,13 +364,11 @@ class JStormStateInternals<K> implements StateInternals {
         private int cursor = 0;
 
         BagStateIterator() {
-          Integer s = null;
           try {
-            s = (Integer) stateInfoKvState.get(getComposedKey());
+            this.size = getElementIndex();
           } catch (IOException e) {
-            LOG.error("Failed to get elemIndex for key={}", getComposedKey());
+            throw new RuntimeException(e.getCause());
           }
-          this.size = s != null ? ++s : 0;
         }
 
         @Override
@@ -382,10 +398,8 @@ class JStormStateInternals<K> implements StateInternals {
         }
       }
 
-      private final int size;
+      BagStateIterable() {
 
-      BagStateIterable(int size) {
-        this.size = size;
       }
 
       @Override


[40/53] [abbrv] beam git commit: jstorm-runner: VM crashes during ValidatesRunner tests; disable reuse of forked VMs.

Posted by pe...@apache.org.
jstorm-runner: VM crashes during ValidatesRunner tests; disable reuse of forked VMs.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/4d634ecf
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/4d634ecf
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/4d634ecf

Branch: refs/heads/jstorm-runner
Commit: 4d634ecf008e7fe6d1a99770da8bb66f7513b7e0
Parents: 588a698
Author: Pei He <pe...@apache.org>
Authored: Wed Jul 19 14:36:30 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/4d634ecf/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index 5d54d94..79634e9 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -67,6 +67,7 @@
                     org.apache.beam.sdk.testing.UsesTestStream
                   </excludedGroups>
                   <parallel>none</parallel>
+                  <reuseForks>false</reuseForks>
                   <failIfNoTests>true</failIfNoTests>
                   <dependenciesToScan>
                     <dependency>org.apache.beam:beam-sdks-java-core</dependency>


[27/53] [abbrv] beam git commit: jstorm-runner: move jstorm state implementations to JStormStateInternals inner classes.

Posted by pe...@apache.org.
jstorm-runner: move jstorm state implementations to JStormStateInternals inner classes.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/9abbbd06
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/9abbbd06
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/9abbbd06

Branch: refs/heads/jstorm-runner
Commit: 9abbbd064e878a961ff3e8fc62d96ea650fd7570
Parents: 8cdd41b
Author: Pei He <pe...@apache.org>
Authored: Fri Jul 14 16:10:29 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../jstorm/translation/JStormBagState.java      | 180 -------
 .../translation/JStormCombiningState.java       |  88 ----
 .../jstorm/translation/JStormMapState.java      | 158 -------
 .../translation/JStormStateInternals.java       | 464 +++++++++++++++++++
 .../jstorm/translation/JStormValueState.java    |  82 ----
 .../translation/JStormWatermarkHoldState.java   |  82 ----
 6 files changed, 464 insertions(+), 590 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
deleted file mode 100644
index 3e5d52b..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormBagState.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implementation of {@link BagState} in JStorm runner.
- */
-class JStormBagState<K, T> implements BagState<T> {
-  private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
-
-  @Nullable
-  private final K key;
-  private final StateNamespace namespace;
-  private final IKvStore<ComposedKey, T> kvState;
-  private final IKvStore<ComposedKey, Object> stateInfoKvState;
-  private int elemIndex;
-
-  public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
-                        IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
-    this.key = key;
-    this.namespace = checkNotNull(namespace, "namespace");
-    this.kvState = checkNotNull(kvState, "kvState");
-    this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
-
-    Integer index = (Integer) stateInfoKvState.get(getComposedKey());
-    this.elemIndex = index != null ? ++index : 0;
-  }
-
-  @Override
-  public void add(T input) {
-    try {
-      kvState.put(getComposedKey(elemIndex), input);
-      stateInfoKvState.put(getComposedKey(), elemIndex);
-      elemIndex++;
-    } catch (IOException e) {
-      throw new RuntimeException(e.getCause());
-    }
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return new ReadableState<Boolean>() {
-      @Override
-      public Boolean read() {
-        return elemIndex <= 0;
-      }
-
-      @Override
-      public ReadableState<Boolean> readLater() {
-        // TODO: support prefetch.
-        return this;
-      }
-    };
-  }
-
-  @Override
-  public Iterable<T> read() {
-    return new BagStateIterable(elemIndex);
-  }
-
-  @Override
-  public BagState readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      for (int i = 0; i < elemIndex; i++) {
-        kvState.remove(getComposedKey(i));
-      }
-      stateInfoKvState.remove(getComposedKey());
-      elemIndex = 0;
-    } catch (IOException e) {
-      throw new RuntimeException(e.getCause());
-    }
-  }
-
-  private ComposedKey getComposedKey() {
-    return ComposedKey.of(key, namespace);
-  }
-
-  private ComposedKey getComposedKey(int elemIndex) {
-    return ComposedKey.of(key, namespace, elemIndex);
-  }
-
-  /**
-   * Implementation of Bag state Iterable.
-   */
-  private class BagStateIterable implements KvStoreIterable<T> {
-
-    private class BagStateIterator implements Iterator<T> {
-      private final int size;
-      private int cursor = 0;
-
-      BagStateIterator() {
-        Integer s = null;
-        try {
-          s = (Integer) stateInfoKvState.get(getComposedKey());
-        } catch (IOException e) {
-          LOG.error("Failed to get elemIndex for key={}", getComposedKey());
-        }
-        this.size = s != null ? ++s : 0;
-      }
-
-      @Override
-      public boolean hasNext() {
-        return cursor < size;
-      }
-
-      @Override
-      public T next() {
-        if (cursor >= size) {
-          throw new NoSuchElementException();
-        }
-
-        T value = null;
-        try {
-          value = kvState.get(getComposedKey(cursor));
-        } catch (IOException e) {
-          LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
-        }
-        cursor++;
-        return value;
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException();
-      }
-    }
-
-    private final int size;
-
-    BagStateIterable(int size) {
-      this.size = size;
-    }
-
-    @Override
-    public Iterator<T> iterator() {
-      return new BagStateIterator();
-    }
-
-    @Override
-    public String toString() {
-      return String.format("BagStateIterable: composedKey=%s", getComposedKey());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
deleted file mode 100644
index 6bd021f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormCombiningState.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import javax.annotation.Nullable;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.transforms.Combine;
-
-/**
- * JStorm implementation of {@link CombiningState}.
- */
-class JStormCombiningState<InputT, AccumT, OutputT>
-    implements CombiningState<InputT, AccumT, OutputT> {
-
-  @Nullable
-  private final BagState<AccumT> accumBagState;
-  private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
-
-  JStormCombiningState(
-      BagState<AccumT> accumBagState,
-      Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-    this.accumBagState = checkNotNull(accumBagState, "accumBagState");
-    this.combineFn = checkNotNull(combineFn, "combineFn");
-  }
-
-  @Override
-  public AccumT getAccum() {
-    // TODO: replacing the accumBagState with the merged accum.
-    return combineFn.mergeAccumulators(accumBagState.read());
-  }
-
-  @Override
-  public void addAccum(AccumT accumT) {
-    accumBagState.add(accumT);
-  }
-
-  @Override
-  public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
-    return combineFn.mergeAccumulators(iterable);
-  }
-
-  @Override
-  public void add(InputT input) {
-    accumBagState.add(
-        combineFn.addInput(combineFn.createAccumulator(), input));
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return accumBagState.isEmpty();
-  }
-
-  @Override
-  public OutputT read() {
-    return combineFn.extractOutput(
-        combineFn.mergeAccumulators(accumBagState.read()));
-  }
-
-  @Override
-  public CombiningState<InputT, AccumT, OutputT> readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    accumBagState.clear();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
deleted file mode 100644
index 6a4e376..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormMapState.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import com.alibaba.jstorm.cache.IKvStore;
-import java.io.IOException;
-import java.util.Map;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implementation of {@link MapState} in JStorm runner.
- * @param <K>
- * @param <V>
- */
-class JStormMapState<K, V> implements MapState<K, V> {
-  private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
-
-  private final K key;
-  private final StateNamespace namespace;
-  private IKvStore<K, V> kvStore;
-
-  public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
-    this.key = key;
-    this.namespace = namespace;
-    this.kvStore = kvStore;
-  }
-
-  @Override
-  public void put(K var1, V var2) {
-    try {
-      kvStore.put(var1, var2);
-    } catch (IOException e) {
-      reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
-    }
-  }
-
-  @Override
-  public ReadableState<V> putIfAbsent(K var1, V var2) {
-    ReadableState<V> ret = null;
-    try {
-      V value = kvStore.get(var1);
-      if (value == null) {
-        kvStore.put(var1, var2);
-        ret = new MapReadableState<>(null);
-      } else {
-        ret = new MapReadableState<>(value);
-      }
-    } catch (IOException e) {
-      reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public void remove(K var1) {
-    try {
-      kvStore.remove(var1);
-    } catch (IOException e) {
-      reportError(String.format("Failed to remove key=%s", var1), e);
-    }
-  }
-
-  @Override
-  public ReadableState<V> get(K var1) {
-    ReadableState<V> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState(kvStore.get(var1));
-    } catch (IOException e) {
-      reportError(String.format("Failed to get value for key=%s", var1), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<K>> keys() {
-    ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.keys());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get keys"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<V>> values() {
-    ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.values());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get values"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
-    ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.entries());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get values"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      Iterable<K> keys = kvStore.keys();
-      kvStore.removeBatch(keys);
-    } catch (IOException e) {
-      reportError(String.format("Failed to clear map state"), e);
-    }
-  }
-
-  private void reportError(String errorInfo, IOException e) {
-    LOG.error(errorInfo, e);
-    throw new RuntimeException(errorInfo);
-  }
-
-  private class MapReadableState<T> implements ReadableState<T> {
-    private T value;
-
-    public MapReadableState(T value) {
-      this.value = value;
-    }
-
-    @Override
-    public T read() {
-      return value;
-    }
-
-    @Override
-    public ReadableState<T> readLater() {
-      return this;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index 78882f2..3b6b4d5 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -20,8 +20,13 @@ package org.apache.beam.runners.jstorm.translation;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStore;
 import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.KvStoreIterable;
 import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
 import javax.annotation.Nullable;
 import org.apache.beam.runners.core.StateInternals;
 import org.apache.beam.runners.core.StateNamespace;
@@ -29,7 +34,9 @@ import org.apache.beam.runners.core.StateTag;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.state.BagState;
 import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.GroupingState;
 import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.ReadableState;
 import org.apache.beam.sdk.state.SetState;
 import org.apache.beam.sdk.state.State;
 import org.apache.beam.sdk.state.StateBinder;
@@ -42,12 +49,16 @@ import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn;
 import org.apache.beam.sdk.transforms.CombineWithContext;
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
 import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * JStorm implementation of {@link StateInternals}.
  */
 class JStormStateInternals<K> implements StateInternals {
 
+  private static final Logger LOG = LoggerFactory.getLogger(JStormStateInternals.class);
+
   private static final String STATE_INFO = "state-info:";
 
   @Nullable
@@ -183,6 +194,459 @@ class JStormStateInternals<K> implements StateInternals {
     });
   }
 
+  /**
+   * JStorm implementation of {@link ValueState}.
+   */
+  private static class JStormValueState<K, T> implements ValueState<T> {
+
+    @Nullable
+    private final K key;
+    private final StateNamespace namespace;
+    private final IKvStore<ComposedKey, T> kvState;
+
+    JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
+      this.key = key;
+      this.namespace = namespace;
+      this.kvState = kvState;
+    }
+
+    @Override
+    public void write(T t) {
+      try {
+        kvState.put(getComposedKey(), t);
+      } catch (IOException e) {
+        throw new RuntimeException(String.format(
+            "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
+      }
+    }
+
+    @Override
+    public T read() {
+      try {
+        return kvState.get(getComposedKey());
+      } catch (IOException e) {
+        throw new RuntimeException(String.format(
+            "Failed to read key: %s, namespace: %s.", key, namespace));
+      }
+    }
+
+    @Override
+    public ValueState<T> readLater() {
+      // TODO: support prefetch.
+      return this;
+    }
+
+    @Override
+    public void clear() {
+      try {
+        kvState.remove(getComposedKey());
+      } catch (IOException e) {
+        throw new RuntimeException(String.format(
+            "Failed to clear key: %s, namespace: %s.", key, namespace));
+      }
+    }
+
+    private ComposedKey getComposedKey() {
+      return ComposedKey.of(key, namespace);
+    }
+  }
+
+  /**
+   * Implementation of {@link BagState} in JStorm runner.
+   */
+  private static class JStormBagState<K, T> implements BagState<T> {
+
+    @Nullable
+    private final K key;
+    private final StateNamespace namespace;
+    private final IKvStore<ComposedKey, T> kvState;
+    private final IKvStore<ComposedKey, Object> stateInfoKvState;
+    private int elemIndex;
+
+    JStormBagState(
+        @Nullable K key,
+        StateNamespace namespace,
+        IKvStore<ComposedKey, T> kvState,
+        IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
+      this.key = key;
+      this.namespace = checkNotNull(namespace, "namespace");
+      this.kvState = checkNotNull(kvState, "kvState");
+      this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
+
+      Integer index = (Integer) stateInfoKvState.get(getComposedKey());
+      this.elemIndex = index != null ? ++index : 0;
+    }
+
+    @Override
+    public void add(T input) {
+      try {
+        kvState.put(getComposedKey(elemIndex), input);
+        stateInfoKvState.put(getComposedKey(), elemIndex);
+        elemIndex++;
+      } catch (IOException e) {
+        throw new RuntimeException(e.getCause());
+      }
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+      return new ReadableState<Boolean>() {
+        @Override
+        public Boolean read() {
+          return elemIndex <= 0;
+        }
+
+        @Override
+        public ReadableState<Boolean> readLater() {
+          // TODO: support prefetch.
+          return this;
+        }
+      };
+    }
+
+    @Override
+    public Iterable<T> read() {
+      return new BagStateIterable(elemIndex);
+    }
+
+    @Override
+    public BagState readLater() {
+      // TODO: support prefetch.
+      return this;
+    }
+
+    @Override
+    public void clear() {
+      try {
+        for (int i = 0; i < elemIndex; i++) {
+          kvState.remove(getComposedKey(i));
+        }
+        stateInfoKvState.remove(getComposedKey());
+        elemIndex = 0;
+      } catch (IOException e) {
+        throw new RuntimeException(e.getCause());
+      }
+    }
+
+    private ComposedKey getComposedKey() {
+      return ComposedKey.of(key, namespace);
+    }
+
+    private ComposedKey getComposedKey(int elemIndex) {
+      return ComposedKey.of(key, namespace, elemIndex);
+    }
+
+    /**
+     * Implementation of Bag state Iterable.
+     */
+    private class BagStateIterable implements KvStoreIterable<T> {
+
+      private class BagStateIterator implements Iterator<T> {
+        private final int size;
+        private int cursor = 0;
+
+        BagStateIterator() {
+          Integer s = null;
+          try {
+            s = (Integer) stateInfoKvState.get(getComposedKey());
+          } catch (IOException e) {
+            LOG.error("Failed to get elemIndex for key={}", getComposedKey());
+          }
+          this.size = s != null ? ++s : 0;
+        }
+
+        @Override
+        public boolean hasNext() {
+          return cursor < size;
+        }
+
+        @Override
+        public T next() {
+          if (cursor >= size) {
+            throw new NoSuchElementException();
+          }
+
+          T value = null;
+          try {
+            value = kvState.get(getComposedKey(cursor));
+          } catch (IOException e) {
+            LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
+          }
+          cursor++;
+          return value;
+        }
+
+        @Override
+        public void remove() {
+          throw new UnsupportedOperationException();
+        }
+      }
+
+      private final int size;
+
+      BagStateIterable(int size) {
+        this.size = size;
+      }
+
+      @Override
+      public Iterator<T> iterator() {
+        return new BagStateIterator();
+      }
+
+      @Override
+      public String toString() {
+        return String.format("BagStateIterable: composedKey=%s", getComposedKey());
+      }
+    }
+  }
+
+  /**
+   * JStorm implementation of {@link CombiningState}.
+   */
+  private static class JStormCombiningState<InputT, AccumT, OutputT>
+      implements CombiningState<InputT, AccumT, OutputT> {
+
+    @Nullable
+    private final BagState<AccumT> accumBagState;
+    private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
+
+    JStormCombiningState(
+        BagState<AccumT> accumBagState,
+        Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+      this.accumBagState = checkNotNull(accumBagState, "accumBagState");
+      this.combineFn = checkNotNull(combineFn, "combineFn");
+    }
+
+    @Override
+    public AccumT getAccum() {
+      // TODO: replacing the accumBagState with the merged accum.
+      return combineFn.mergeAccumulators(accumBagState.read());
+    }
+
+    @Override
+    public void addAccum(AccumT accumT) {
+      accumBagState.add(accumT);
+    }
+
+    @Override
+    public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
+      return combineFn.mergeAccumulators(iterable);
+    }
+
+    @Override
+    public void add(InputT input) {
+      accumBagState.add(
+          combineFn.addInput(combineFn.createAccumulator(), input));
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+      return accumBagState.isEmpty();
+    }
+
+    @Override
+    public OutputT read() {
+      return combineFn.extractOutput(
+          combineFn.mergeAccumulators(accumBagState.read()));
+    }
+
+    @Override
+    public CombiningState<InputT, AccumT, OutputT> readLater() {
+      // TODO: support prefetch.
+      return this;
+    }
+
+    @Override
+    public void clear() {
+      accumBagState.clear();
+    }
+  }
+
+  /**
+   * Implementation of {@link MapState} in JStorm runner.
+   * @param <K>
+   * @param <V>
+   */
+  private static class JStormMapState<K, V> implements MapState<K, V> {
+
+    private final K key;
+    private final StateNamespace namespace;
+    private IKvStore<K, V> kvStore;
+
+    JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
+      this.key = key;
+      this.namespace = namespace;
+      this.kvStore = kvStore;
+    }
+
+    @Override
+    public void put(K var1, V var2) {
+      try {
+        kvStore.put(var1, var2);
+      } catch (IOException e) {
+        reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
+      }
+    }
+
+    @Override
+    public ReadableState<V> putIfAbsent(K var1, V var2) {
+      ReadableState<V> ret = null;
+      try {
+        V value = kvStore.get(var1);
+        if (value == null) {
+          kvStore.put(var1, var2);
+          ret = new MapReadableState<>(null);
+        } else {
+          ret = new MapReadableState<>(value);
+        }
+      } catch (IOException e) {
+        reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
+      }
+      return ret;
+    }
+
+    @Override
+    public void remove(K var1) {
+      try {
+        kvStore.remove(var1);
+      } catch (IOException e) {
+        reportError(String.format("Failed to remove key=%s", var1), e);
+      }
+    }
+
+    @Override
+    public ReadableState<V> get(K var1) {
+      ReadableState<V> ret = new MapReadableState<>(null);
+      try {
+        ret = new MapReadableState(kvStore.get(var1));
+      } catch (IOException e) {
+        reportError(String.format("Failed to get value for key=%s", var1), e);
+      }
+      return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<K>> keys() {
+      ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
+      try {
+        ret = new MapReadableState<>(kvStore.keys());
+      } catch (IOException e) {
+        reportError(String.format("Failed to get keys"), e);
+      }
+      return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<V>> values() {
+      ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
+      try {
+        ret = new MapReadableState<>(kvStore.values());
+      } catch (IOException e) {
+        reportError(String.format("Failed to get values"), e);
+      }
+      return ret;
+    }
+
+    @Override
+    public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
+      ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
+      try {
+        ret = new MapReadableState<>(kvStore.entries());
+      } catch (IOException e) {
+        reportError(String.format("Failed to get values"), e);
+      }
+      return ret;
+    }
+
+    @Override
+    public void clear() {
+      try {
+        Iterable<K> keys = kvStore.keys();
+        kvStore.removeBatch(keys);
+      } catch (IOException e) {
+        reportError(String.format("Failed to clear map state"), e);
+      }
+    }
+
+    private void reportError(String errorInfo, IOException e) {
+      LOG.error(errorInfo, e);
+      throw new RuntimeException(errorInfo);
+    }
+
+    private class MapReadableState<T> implements ReadableState<T> {
+      private T value;
+
+      public MapReadableState(T value) {
+        this.value = value;
+      }
+
+      @Override
+      public T read() {
+        return value;
+      }
+
+      @Override
+      public ReadableState<T> readLater() {
+        return this;
+      }
+    }
+  }
+
+  /**
+   * JStorm implementation of {@link WatermarkHoldState}.
+   */
+  private static class JStormWatermarkHoldState implements WatermarkHoldState {
+
+    private final StateNamespace namespace;
+    private final GroupingState<Instant, Instant> watermarkHoldsState;
+    private final TimestampCombiner timestampCombiner;
+    private final TimerService timerService;
+
+    JStormWatermarkHoldState(
+        StateNamespace namespace,
+        GroupingState<Instant, Instant> watermarkHoldsState,
+        TimestampCombiner timestampCombiner,
+        TimerService timerService) {
+      this.namespace = checkNotNull(namespace, "namespace");
+      this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
+      this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
+      this.timerService = checkNotNull(timerService, "timerService");
+    }
+
+    @Override
+    public TimestampCombiner getTimestampCombiner() {
+      return timestampCombiner;
+    }
+
+    @Override
+    public void add(Instant instant) {
+      timerService.addWatermarkHold(namespace.stringKey(), instant);
+      watermarkHoldsState.add(instant);
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+      return watermarkHoldsState.isEmpty();
+    }
+
+    @Override
+    public Instant read() {
+      return watermarkHoldsState.read();
+    }
+
+    @Override
+    public WatermarkHoldState readLater() {
+      // TODO: support prefetch.
+      return this;
+    }
+
+    @Override
+    public void clear() {
+      timerService.clearWatermarkHold(namespace.stringKey());
+      watermarkHoldsState.clear();
+    }
+  }
+
   private String getStoreId(String stateId) {
     return String.format("%s-%s", stateId, executorId);
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
deleted file mode 100644
index 5d79d21..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormValueState.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-import java.io.IOException;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.ValueState;
-
-/**
- * JStorm implementation of {@link ValueState}.
- */
-class JStormValueState<K, T> implements ValueState<T> {
-
-  @Nullable
-  private final K key;
-  private final StateNamespace namespace;
-  private final IKvStore<ComposedKey, T> kvState;
-
-  JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
-    this.key = key;
-    this.namespace = namespace;
-    this.kvState = kvState;
-  }
-
-  @Override
-  public void write(T t) {
-    try {
-      kvState.put(getComposedKey(), t);
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
-    }
-  }
-
-  @Override
-  public T read() {
-    try {
-      return kvState.get(getComposedKey());
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to read key: %s, namespace: %s.", key, namespace));
-    }
-  }
-
-  @Override
-  public ValueState<T> readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      kvState.remove(getComposedKey());
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to clear key: %s, namespace: %s.", key, namespace));
-    }
-  }
-
-  private ComposedKey getComposedKey() {
-    return ComposedKey.of(key, namespace);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/9abbbd06/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
deleted file mode 100644
index 7e1c28f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormWatermarkHoldState.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.GroupingState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-
-/**
- * JStorm implementation of {@link WatermarkHoldState}.
- */
-class JStormWatermarkHoldState implements WatermarkHoldState {
-
-  private final StateNamespace namespace;
-  private final GroupingState<Instant, Instant> watermarkHoldsState;
-  private final TimestampCombiner timestampCombiner;
-  private final TimerService timerService;
-
-  JStormWatermarkHoldState(
-      StateNamespace namespace,
-      GroupingState<Instant, Instant> watermarkHoldsState,
-      TimestampCombiner timestampCombiner,
-      TimerService timerService) {
-    this.namespace = checkNotNull(namespace, "namespace");
-    this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
-    this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
-    this.timerService = checkNotNull(timerService, "timerService");
-  }
-
-  @Override
-  public TimestampCombiner getTimestampCombiner() {
-    return timestampCombiner;
-  }
-
-  @Override
-  public void add(Instant instant) {
-    timerService.addWatermarkHold(namespace.stringKey(), instant);
-    watermarkHoldsState.add(instant);
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return watermarkHoldsState.isEmpty();
-  }
-
-  @Override
-  public Instant read() {
-    return watermarkHoldsState.read();
-  }
-
-  @Override
-  public WatermarkHoldState readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    timerService.clearWatermarkHold(namespace.stringKey());
-    watermarkHoldsState.clear();
-  }
-}


[16/53] [abbrv] beam git commit: jstorm-runner: fix compilation error and remove obsolete method.

Posted by pe...@apache.org.
jstorm-runner: fix compilation error and remove obsolete method.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/78a5076a
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/78a5076a
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/78a5076a

Branch: refs/heads/jstorm-runner
Commit: 78a5076a6951a697922aceaabc1e32dd20c8de36
Parents: 4ff42cb
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Fri Jul 14 10:29:00 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:56 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       |  4 ++--
 .../serialization/ImmutableListSerializer.java  | 21 --------------------
 2 files changed, 2 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/78a5076a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 5fdbe4d..5375d6e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -38,7 +38,7 @@ import org.apache.beam.runners.jstorm.serialization.KvStoreIterableSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuListSerializer;
 import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuSetSerializer;
 import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSerializer;
-import org.apache.beam.runners.jstorm.translation.StormPipelineTranslator;
+import org.apache.beam.runners.jstorm.translation.JStormPipelineTranslator;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.AbstractComponent;
 import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicBolt;
@@ -114,7 +114,7 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
   public JStormRunnerResult run(Pipeline pipeline) {
     LOG.info("Running pipeline...");
     TranslationContext context = new TranslationContext(this.options);
-    StormPipelineTranslator transformer = new StormPipelineTranslator(context);
+    JStormPipelineTranslator transformer = new JStormPipelineTranslator(context);
     transformer.translate(pipeline);
     LOG.info("UserGraphContext=\n{}", context.getUserGraphContext());
     LOG.info("ExecutionGraphContext=\n{}", context.getExecutionGraphContext());

http://git-wip-us.apache.org/repos/asf/beam/blob/78a5076a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
index fa4eeb6..c479f26 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
@@ -60,49 +60,28 @@ public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
     //      Used by return value of #values() when there are multiple cells
 
     config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
-    config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(ImmutableList.class), ImmutableListSerializer.class);
 
     // Note:
     //  Only registering above is good enough for serializing/deserializing.
     //  but if using Kryo#copy, following is required.
 
     config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
-    config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().getClass()),
-        ImmutableListSerializer.class);
     config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
     config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1).getClass()),
-        ImmutableListSerializer.class);
-    config.registerSerialization(
         ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
         ImmutableListSerializer.class);
     config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1, 2, 3).subList(1, 2).getClass()),
-        ImmutableListSerializer.class);
-    config.registerSerialization(
         ImmutableList.of().reverse().getClass(),
         ImmutableListSerializer.class);
-    config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().reverse().getClass()),
-        ImmutableListSerializer.class);
 
     config.registerSerialization(
         Lists.charactersOf("KryoRocks").getClass(),
         ImmutableListSerializer.class);
-    config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(Lists.charactersOf("KryoRocks").getClass()),
-        ImmutableListSerializer.class);
 
     Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
     baseTable.put(1, 2, 3);
     baseTable.put(4, 5, 6);
     Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
     config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
-    config.registerSerialization(
-        RunnerUtils.getBeamSdkRepackClass(table.values().getClass()),
-        ImmutableListSerializer.class);
-
   }
 }


[33/53] [abbrv] beam git commit: jstorm-runner: Throw AssertionError instead of RuntimeException when the pipeline encounters an exception

Posted by pe...@apache.org.
jstorm-runner: Throw AssertionError instead of RuntimeException when the pipeline encounters an exception


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/af5221c0
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/af5221c0
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/af5221c0

Branch: refs/heads/jstorm-runner
Commit: af5221c001678e36de6492fa20b3fc4026f486e8
Parents: dc6f63c
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Tue Jul 18 14:50:19 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/TestJStormRunner.java   | 41 ++++++++++----------
 1 file changed, 21 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/af5221c0/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index a117675..0088cf9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -47,6 +47,7 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
 
   @Override
   public JStormRunnerResult run(Pipeline pipeline) {
+    TaskReportErrorAndDie.setExceptionRecord(null);
     JStormRunnerResult result = stormRunner.run(pipeline);
 
     try {
@@ -54,30 +55,30 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
 
       LOG.info("Running JStorm job {} with {} expected assertions.",
                result.getTopologyName(), numberOfAssertions);
-      if (numberOfAssertions == 0) {
-        // If assert number is zero, wait 5 sec
-        JStormUtils.sleepMs(5000);
+
+      int maxTimeoutSec = numberOfAssertions > 0 ? 20 : 5;
+      for (int waitTime = 0; waitTime <= maxTimeoutSec * 1000; ) {
+        Optional<Boolean> success = numberOfAssertions > 0
+                ? checkForPAssertSuccess(numberOfAssertions) : Optional.<Boolean>absent();
         Exception taskExceptionRec = TaskReportErrorAndDie.getExceptionRecord();
-        if (taskExceptionRec != null) {
-          throw new RuntimeException(taskExceptionRec.getCause());
-        }
-        return result;
-      } else {
-        for (int i = 0; i < 40; ++i) {
-          Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
-          Exception taskExceptionRec = TaskReportErrorAndDie.getExceptionRecord();
-          if (success.isPresent() && success.get()) {
-            return result;
-          } else if (success.isPresent() && !success.get()) {
-            throw new AssertionError("Failed assertion checks.");
-          } else if (taskExceptionRec != null) {
-            throw new RuntimeException(taskExceptionRec.getCause());
-          } else {
-            JStormUtils.sleepMs(500);
-          }
+        if (success.isPresent() && success.get()) {
+          return result;
+        } else if (success.isPresent() && !success.get()) {
+          throw new AssertionError("Failed assertion checks.");
+        } else if (taskExceptionRec != null) {
+          LOG.info("Exception was found.", taskExceptionRec);
+          throw new AssertionError(taskExceptionRec.getCause());
+        } else {
+          JStormUtils.sleepMs(500);
+          waitTime += 500;
         }
+      }
+
+      if (numberOfAssertions > 0) {
         LOG.info("Assertion checks timed out.");
         throw new AssertionError("Assertion checks timed out.");
+      } else {
+        return result;
       }
     } finally {
       clearPAssertCount();


[34/53] [abbrv] beam git commit: jstorm-runner: update jstorm runner package name in logback config.

Posted by pe...@apache.org.
jstorm-runner: update jstorm runner package name in logback config.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/1bf32247
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/1bf32247
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/1bf32247

Branch: refs/heads/jstorm-runner
Commit: 1bf3224708b6b2a9c195eddf65a7bfb279d6806d
Parents: aca16cc
Author: Pei He <pe...@apache.org>
Authored: Wed Jul 19 16:14:14 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/src/test/resources/logback.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/1bf32247/runners/jstorm/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/resources/logback.xml b/runners/jstorm/src/test/resources/logback.xml
index be3bc8f..635933c 100644
--- a/runners/jstorm/src/test/resources/logback.xml
+++ b/runners/jstorm/src/test/resources/logback.xml
@@ -14,7 +14,7 @@
     <logger name="com.alibaba.jstorm.metric" level="ERROR" />
     <logger name="shade.storm.org.apache.zookeeper" level="ERROR"/>
     <logger name="shade.storm.org.apache.curator" level="ERROR"/>
-    <logger name="com.alibaba.jstorm.beam" level="INFO"/>
+    <logger name="org.apache.beam.runners.jstorm" level="INFO"/>
 
     <!-- sync client logback with jstorm.logback.xml  -->
     <logger name="com.alibaba.jstorm.daemon.worker.hearbeat" level="WARN"/>


[32/53] [abbrv] beam git commit: jstorm-runner: disable validates runner tests with TestStream and Metrics.

Posted by pe...@apache.org.
jstorm-runner: disable validates runner tests with TestStream and Metrics.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/dc6f63ca
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/dc6f63ca
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/dc6f63ca

Branch: refs/heads/jstorm-runner
Commit: dc6f63cafa9c91535f48aa483d316bb9a0d12a41
Parents: a5af6d2
Author: Pei He <pe...@apache.org>
Authored: Tue Jul 18 14:48:25 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/dc6f63ca/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index 9808cd2..5d54d94 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -61,8 +61,10 @@
                   </groups>
                   <excludedGroups>
                     org.apache.beam.sdk.testing.UsesSetState,
+                    org.apache.beam.sdk.testing.UsesSplittableParDo,
+                    org.apache.beam.sdk.testing.UsesAttemptedMetrics,
                     org.apache.beam.sdk.testing.UsesCommittedMetrics,
-                    org.apache.beam.sdk.testing.UsesSplittableParDo
+                    org.apache.beam.sdk.testing.UsesTestStream
                   </excludedGroups>
                   <parallel>none</parallel>
                   <failIfNoTests>true</failIfNoTests>


[36/53] [abbrv] beam git commit: jstorm-runner: Fix the failure of session window test cases in CombineTest

Posted by pe...@apache.org.
jstorm-runner: Fix the failure of session window test cases in CombineTest


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/52913b7e
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/52913b7e
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/52913b7e

Branch: refs/heads/jstorm-runner
Commit: 52913b7e2b01b4e6c65d96a10d745dd3e6739c83
Parents: 201ef72
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Thu Jul 20 14:37:29 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 .../jstorm/translation/FlattenTranslator.java   |   1 -
 .../translation/JStormStateInternals.java       | 188 +++++++++++++++++--
 2 files changed, 176 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/52913b7e/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
index 8f239bf..e104ad8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -28,7 +28,6 @@ import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.values.PCollection;

http://git-wip-us.apache.org/repos/asf/beam/blob/52913b7e/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index 68a17e5..90ef6d2 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -42,6 +42,7 @@ import org.apache.beam.sdk.state.State;
 import org.apache.beam.sdk.state.StateBinder;
 import org.apache.beam.sdk.state.StateContext;
 import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.StateSpecs;
 import org.apache.beam.sdk.state.ValueState;
 import org.apache.beam.sdk.state.WatermarkHoldState;
 import org.apache.beam.sdk.transforms.Combine;
@@ -93,13 +94,14 @@ class JStormStateInternals<K> implements StateInternals {
   }
 
   @Override
-  public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
+  public <T extends State> T state(final StateNamespace namespace, final StateTag<T> address) {
     return address.getSpec().bind(address.getId(), new StateBinder() {
       @Override
       public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
         try {
           return new JStormValueState<>(
-              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
+              getStoreId(id), spec, getKey(), namespace,
+              kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
         } catch (IOException e) {
           throw new RuntimeException();
         }
@@ -109,7 +111,8 @@ class JStormStateInternals<K> implements StateInternals {
       public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
         try {
           return new JStormBagState(
-              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
+              getStoreId(id), spec, getKey(), namespace,
+              kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
               kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
         } catch (IOException e) {
           throw new RuntimeException();
@@ -129,7 +132,8 @@ class JStormStateInternals<K> implements StateInternals {
           Coder<ValueT> mapValueCoder) {
         try {
           return new JStormMapState<>(
-              getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
+              getStoreId(id), spec, (KeyT) getKey(), namespace,
+              kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
         } catch (IOException e) {
           throw new RuntimeException(e);
         }
@@ -143,10 +147,11 @@ class JStormStateInternals<K> implements StateInternals {
           Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
         try {
           BagState<AccumT> accumBagState = new JStormBagState(
-              getKey(), namespace,
+              getStoreId(id), StateSpecs.<InputT>bag(), getKey(), namespace,
               kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
               kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-          return new JStormCombiningState<>(accumBagState, combineFn);
+          return new JStormCombiningState<>(
+              id, spec, namespace, accumBagState, combineFn);
         } catch (IOException e) {
           throw new RuntimeException();
         }
@@ -169,7 +174,7 @@ class JStormStateInternals<K> implements StateInternals {
           final TimestampCombiner timestampCombiner) {
         try {
           BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
-              getKey(), namespace,
+              getStoreId(id), StateSpecs.<Combine.Holder<Instant>>bag(), getKey(), namespace,
               kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
               kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
 
@@ -181,8 +186,11 @@ class JStormStateInternals<K> implements StateInternals {
                 }
               };
           return new JStormWatermarkHoldState(
-              namespace,
+              id, spec, namespace,
               new JStormCombiningState<>(
+                  getStoreId(id),
+                  StateSpecs.combining(outputTimeCombineFn),
+                  namespace,
                   accumBagState,
                   outputTimeCombineFn),
               timestampCombiner,
@@ -199,12 +207,21 @@ class JStormStateInternals<K> implements StateInternals {
    */
   private static class JStormValueState<K, T> implements ValueState<T> {
 
+    private final String id;
+    private final StateSpec<ValueState<T>> spec;
     @Nullable
     private final K key;
     private final StateNamespace namespace;
     private final IKvStore<ComposedKey, T> kvState;
 
-    JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
+    JStormValueState(
+            String id,
+            StateSpec<ValueState<T>> spec,
+            @Nullable K key,
+            StateNamespace namespace,
+            IKvStore<ComposedKey, T> kvState) {
+      this.id = id;
+      this.spec = spec;
       this.key = key;
       this.namespace = namespace;
       this.kvState = kvState;
@@ -246,6 +263,29 @@ class JStormStateInternals<K> implements StateInternals {
       }
     }
 
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      JStormValueState<?, ?> that = (JStormValueState<?, ?>) o;
+
+      return namespace.equals(that.namespace) && id.equals(that.id) && spec.equals(that.spec);
+
+    }
+
+    @Override
+    public int hashCode() {
+      int result = namespace.hashCode();
+      result = 31 * result + id.hashCode();
+      result = 31 * result + spec.hashCode();
+      return result;
+    }
+
     private ComposedKey getComposedKey() {
       return ComposedKey.of(key, namespace);
     }
@@ -256,6 +296,8 @@ class JStormStateInternals<K> implements StateInternals {
    */
   private static class JStormBagState<K, T> implements BagState<T> {
 
+    private final String id;
+    private final StateSpec<BagState<T>> spec;
     @Nullable
     private final K key;
     private final StateNamespace namespace;
@@ -263,10 +305,14 @@ class JStormStateInternals<K> implements StateInternals {
     private final IKvStore<ComposedKey, Object> stateInfoKvState;
 
     JStormBagState(
+        String id,
+        StateSpec<BagState<T>> spec,
         @Nullable K key,
         StateNamespace namespace,
         IKvStore<ComposedKey, T> kvState,
         IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
+      this.id = id;
+      this.spec = spec;
       this.key = key;
       this.namespace = checkNotNull(namespace, "namespace");
       this.kvState = checkNotNull(kvState, "kvState");
@@ -350,8 +396,31 @@ class JStormStateInternals<K> implements StateInternals {
       } catch (IOException e) {
 
       }
-      return String.format("JStormBagState: key=%s, namespace=%s, elementIndex=%d",
-              key, namespace, elemIndex);
+      return String.format("stateId=%s, key=%s, namespace=%s, elementIndex=%d",
+              id, key, namespace, elemIndex);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      JStormBagState<?, ?> that = (JStormBagState<?, ?>) o;
+
+      return namespace.equals(that.namespace) && id.equals(that.id) && spec.equals(that.spec);
+
+    }
+
+    @Override
+    public int hashCode() {
+      int result = namespace.hashCode();
+      result = 31 * result + id.hashCode();
+      result = 31 * result + spec.hashCode();
+      return result;
     }
 
     /**
@@ -420,13 +489,22 @@ class JStormStateInternals<K> implements StateInternals {
   private static class JStormCombiningState<InputT, AccumT, OutputT>
       implements CombiningState<InputT, AccumT, OutputT> {
 
+    private final String id;
+    private final StateSpec<CombiningState<InputT, AccumT, OutputT>> spec;
+    private final StateNamespace namespace;
     @Nullable
     private final BagState<AccumT> accumBagState;
     private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
 
     JStormCombiningState(
+        String id,
+        StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
+        StateNamespace namespace,
         BagState<AccumT> accumBagState,
         Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+      this.id = id;
+      this.spec = spec;
+      this.namespace = namespace;
       this.accumBagState = checkNotNull(accumBagState, "accumBagState");
       this.combineFn = checkNotNull(combineFn, "combineFn");
     }
@@ -474,6 +552,29 @@ class JStormStateInternals<K> implements StateInternals {
     public void clear() {
       accumBagState.clear();
     }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      JStormCombiningState<?, ?, ?> that = (JStormCombiningState<?, ?, ?>) o;
+
+      return namespace.equals(that.namespace) && id.equals(that.id) && spec.equals(that.spec);
+
+    }
+
+    @Override
+    public int hashCode() {
+      int result = namespace.hashCode();
+      result = 31 * result + id.hashCode();
+      result = 31 * result + spec.hashCode();
+      return result;
+    }
   }
 
   /**
@@ -483,11 +584,19 @@ class JStormStateInternals<K> implements StateInternals {
    */
   private static class JStormMapState<K, V> implements MapState<K, V> {
 
+    private final String id;
+    private final StateSpec<MapState<K, V>> spec;
     private final K key;
     private final StateNamespace namespace;
     private IKvStore<K, V> kvStore;
 
-    JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
+    JStormMapState(
+        String id,
+        StateSpec<MapState<K, V>> spec,
+        K key,
+        StateNamespace namespace, IKvStore<K, V> kvStore) {
+      this.id = id;
+      this.spec = spec;
       this.key = key;
       this.namespace = namespace;
       this.kvStore = kvStore;
@@ -582,6 +691,29 @@ class JStormStateInternals<K> implements StateInternals {
       }
     }
 
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      JStormMapState<?, ?> that = (JStormMapState<?, ?>) o;
+
+      return namespace.equals(that.namespace) && id.equals(that.id) && spec.equals(that.spec);
+
+    }
+
+    @Override
+    public int hashCode() {
+      int result = namespace.hashCode();
+      result = 31 * result + id.hashCode();
+      result = 31 * result + spec.hashCode();
+      return result;
+    }
+
     private void reportError(String errorInfo, IOException e) {
       LOG.error(errorInfo, e);
       throw new RuntimeException(errorInfo);
@@ -611,16 +743,22 @@ class JStormStateInternals<K> implements StateInternals {
    */
   private static class JStormWatermarkHoldState implements WatermarkHoldState {
 
+    private final String id;
+    private final StateSpec<WatermarkHoldState> spec;
     private final StateNamespace namespace;
     private final GroupingState<Instant, Instant> watermarkHoldsState;
     private final TimestampCombiner timestampCombiner;
     private final TimerService timerService;
 
     JStormWatermarkHoldState(
+        String id,
+        StateSpec<WatermarkHoldState> spec,
         StateNamespace namespace,
         GroupingState<Instant, Instant> watermarkHoldsState,
         TimestampCombiner timestampCombiner,
         TimerService timerService) {
+      this.id = checkNotNull(id, "id");
+      this.spec = checkNotNull(spec, "spec");
       this.namespace = checkNotNull(namespace, "namespace");
       this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
       this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
@@ -659,6 +797,32 @@ class JStormStateInternals<K> implements StateInternals {
       timerService.clearWatermarkHold(namespace.stringKey());
       watermarkHoldsState.clear();
     }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      JStormWatermarkHoldState that = (JStormWatermarkHoldState) o;
+
+      return namespace.equals(that.namespace)
+          && id.equals(that.id)
+          && spec.equals(that.spec)
+          && timestampCombiner.equals(that.timestampCombiner);
+    }
+
+    @Override
+    public int hashCode() {
+      int result = namespace.hashCode();
+      result = 31 * result + id.hashCode();
+      result = 31 * result + spec.hashCode();
+      result = 31 * result + timestampCombiner.hashCode();
+      return result;
+    }
   }
 
   private String getStoreId(String stateId) {


[09/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
jstorm-runner: rename the package to org.apache.beam.runners.jstorm.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/aa654b3f
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/aa654b3f
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/aa654b3f

Branch: refs/heads/jstorm-runner
Commit: aa654b3f15a242221727d021cf4be676c49bd49b
Parents: 6ff07fc
Author: Pei He <pe...@apache.org>
Authored: Thu Jul 13 17:02:21 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 02:40:26 2017 +0800

----------------------------------------------------------------------
 .../jstorm/beam/StormPipelineOptions.java       |  72 ---
 .../com/alibaba/jstorm/beam/StormRegistrar.java |  48 --
 .../com/alibaba/jstorm/beam/StormRunner.java    | 339 ----------
 .../alibaba/jstorm/beam/TestJStormRunner.java   | 122 ----
 .../serialization/ImmutableListSerializer.java  |  92 ---
 .../serialization/ImmutableMapSerializer.java   |  62 --
 .../serialization/ImmutableSetSerializer.java   |  72 ---
 .../KvStoreIterableSerializer.java              |  55 --
 .../SdkRepackImmuListSerializer.java            |  78 ---
 .../SdkRepackImmuSetSerializer.java             |  72 ---
 .../UnmodifiableCollectionsSerializer.java      | 159 -----
 .../translation/StormPipelineTranslator.java    | 181 ------
 .../beam/translation/TranslationContext.java    | 425 -------------
 .../beam/translation/TranslatorRegistry.java    |  76 ---
 .../translation/runtime/AbstractComponent.java  |  71 ---
 .../translation/runtime/AdaptorBasicBolt.java   |  24 -
 .../translation/runtime/AdaptorBasicSpout.java  |  24 -
 .../beam/translation/runtime/DoFnExecutor.java  | 330 ----------
 .../runtime/DoFnRunnerWithMetrics.java          |  90 ---
 .../beam/translation/runtime/Executor.java      |  37 --
 .../translation/runtime/ExecutorContext.java    |  35 --
 .../beam/translation/runtime/ExecutorsBolt.java | 332 ----------
 .../translation/runtime/FlattenExecutor.java    |  55 --
 .../runtime/GroupByWindowExecutor.java          | 160 -----
 .../translation/runtime/MetricsReporter.java    |  93 ---
 .../runtime/MultiOutputDoFnExecutor.java        |  75 ---
 .../runtime/MultiStatefulDoFnExecutor.java      |  68 --
 .../runtime/StatefulDoFnExecutor.java           |  67 --
 .../beam/translation/runtime/TimerService.java  |  52 --
 .../translation/runtime/TimerServiceImpl.java   | 150 -----
 .../translation/runtime/TxExecutorsBolt.java    | 131 ----
 .../runtime/TxUnboundedSourceSpout.java         | 153 -----
 .../runtime/UnboundedSourceSpout.java           | 198 ------
 .../beam/translation/runtime/ViewExecutor.java  |  55 --
 .../runtime/WindowAssignExecutor.java           | 108 ----
 .../runtime/state/JStormBagState.java           | 178 ------
 .../runtime/state/JStormCombiningState.java     |  88 ---
 .../runtime/state/JStormMapState.java           | 155 -----
 .../runtime/state/JStormStateInternals.java     | 192 ------
 .../runtime/state/JStormValueState.java         |  84 ---
 .../runtime/state/JStormWatermarkHoldState.java |  83 ---
 .../runtime/timer/JStormTimerInternals.java     |  99 ---
 .../translator/BoundedSourceTranslator.java     |  50 --
 .../translator/CombineGloballyTranslator.java   |  24 -
 .../translator/CombinePerKeyTranslator.java     |  24 -
 .../translator/FlattenTranslator.java           |  49 --
 .../translator/GroupByKeyTranslator.java        |  69 --
 .../translator/ParDoBoundMultiTranslator.java   | 114 ----
 .../translator/ParDoBoundTranslator.java        | 106 ----
 .../translator/ReshuffleTranslator.java         |  24 -
 .../beam/translation/translator/Stream.java     |  91 ---
 .../translator/TransformTranslator.java         |  77 ---
 .../translator/UnboundedSourceTranslator.java   |  46 --
 .../translation/translator/ViewTranslator.java  | 374 -----------
 .../translator/WindowAssignTranslator.java      |  38 --
 .../translator/WindowBoundTranslator.java       |  48 --
 .../beam/translation/util/CommonInstance.java   |  25 -
 .../util/DefaultSideInputReader.java            |  46 --
 .../translation/util/DefaultStepContext.java    |  89 ---
 .../alibaba/jstorm/beam/util/RunnerUtils.java   |  53 --
 .../beam/util/SerializedPipelineOptions.java    |  64 --
 .../beam/util/SingletonKeyedWorkItem.java       |  62 --
 .../runners/jstorm/StormPipelineOptions.java    |  72 +++
 .../beam/runners/jstorm/StormRegistrar.java     |  48 ++
 .../apache/beam/runners/jstorm/StormRunner.java | 345 ++++++++++
 .../beam/runners/jstorm/TestJStormRunner.java   | 120 ++++
 .../serialization/ImmutableListSerializer.java  |  92 +++
 .../serialization/ImmutableMapSerializer.java   |  61 ++
 .../serialization/ImmutableSetSerializer.java   |  71 +++
 .../KvStoreIterableSerializer.java              |  55 ++
 .../SdkRepackImmuListSerializer.java            |  78 +++
 .../SdkRepackImmuSetSerializer.java             |  71 +++
 .../UnmodifiableCollectionsSerializer.java      | 159 +++++
 .../translation/StormPipelineTranslator.java    | 177 ++++++
 .../jstorm/translation/TranslationContext.java  | 424 +++++++++++++
 .../jstorm/translation/TranslatorRegistry.java  |  85 +++
 .../translation/runtime/AbstractComponent.java  |  70 +++
 .../translation/runtime/AdaptorBasicBolt.java   |  24 +
 .../translation/runtime/AdaptorBasicSpout.java  |  24 +
 .../translation/runtime/DoFnExecutor.java       | 328 ++++++++++
 .../runtime/DoFnRunnerWithMetrics.java          |  90 +++
 .../jstorm/translation/runtime/Executor.java    |  34 +
 .../translation/runtime/ExecutorContext.java    |  35 ++
 .../translation/runtime/ExecutorsBolt.java      | 327 ++++++++++
 .../translation/runtime/FlattenExecutor.java    |  55 ++
 .../runtime/GroupByWindowExecutor.java          | 157 +++++
 .../translation/runtime/MetricsReporter.java    |  87 +++
 .../runtime/MultiOutputDoFnExecutor.java        |  75 +++
 .../runtime/MultiStatefulDoFnExecutor.java      |  68 ++
 .../runtime/StatefulDoFnExecutor.java           |  67 ++
 .../translation/runtime/TimerService.java       |  52 ++
 .../translation/runtime/TimerServiceImpl.java   | 150 +++++
 .../translation/runtime/TxExecutorsBolt.java    | 131 ++++
 .../runtime/TxUnboundedSourceSpout.java         | 153 +++++
 .../runtime/UnboundedSourceSpout.java           | 193 ++++++
 .../translation/runtime/ViewExecutor.java       |  55 ++
 .../runtime/WindowAssignExecutor.java           | 107 ++++
 .../runtime/state/JStormBagState.java           | 178 ++++++
 .../runtime/state/JStormCombiningState.java     |  88 +++
 .../runtime/state/JStormMapState.java           | 154 +++++
 .../runtime/state/JStormStateInternals.java     | 191 ++++++
 .../runtime/state/JStormValueState.java         |  84 +++
 .../runtime/state/JStormWatermarkHoldState.java |  83 +++
 .../runtime/timer/JStormTimerInternals.java     |  99 +++
 .../translator/BoundedSourceTranslator.java     |  50 ++
 .../translator/CombineGloballyTranslator.java   |  24 +
 .../translator/CombinePerKeyTranslator.java     |  24 +
 .../translator/FlattenTranslator.java           |  49 ++
 .../translator/GroupByKeyTranslator.java        |  69 ++
 .../translator/ParDoBoundMultiTranslator.java   | 111 ++++
 .../translator/ParDoBoundTranslator.java        | 106 ++++
 .../translator/ReshuffleTranslator.java         |  24 +
 .../jstorm/translation/translator/Stream.java   |  91 +++
 .../translator/TransformTranslator.java         |  76 +++
 .../translator/UnboundedSourceTranslator.java   |  46 ++
 .../translation/translator/ViewTranslator.java  | 374 +++++++++++
 .../translator/WindowAssignTranslator.java      |  38 ++
 .../translator/WindowBoundTranslator.java       |  47 ++
 .../jstorm/translation/util/CommonInstance.java |  25 +
 .../util/DefaultSideInputReader.java            |  46 ++
 .../translation/util/DefaultStepContext.java    |  89 +++
 .../beam/runners/jstorm/util/RunnerUtils.java   |  53 ++
 .../jstorm/util/SerializedPipelineOptions.java  |  64 ++
 .../jstorm/util/SingletonKeyedWorkItem.java     |  62 ++
 .../runtime/state/JStormStateInternalsTest.java | 219 -------
 .../translator/CoGroupByKeyTest.java            | 302 ---------
 .../translation/translator/GroupByKeyTest.java  | 159 -----
 .../beam/translation/translator/ParDoTest.java  | 626 ------------------
 .../runtime/state/JStormStateInternalsTest.java | 219 +++++++
 .../translator/CoGroupByKeyTest.java            | 302 +++++++++
 .../translation/translator/GroupByKeyTest.java  | 158 +++++
 .../translation/translator/ParDoTest.java       | 627 +++++++++++++++++++
 132 files changed, 7791 insertions(+), 7819 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormPipelineOptions.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormPipelineOptions.java
deleted file mode 100644
index d1538ce..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormPipelineOptions.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam;
-
-import avro.shaded.com.google.common.collect.Maps;
-import org.apache.beam.sdk.options.*;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Options which can be used to configure a JStorm PipelineRunner.
- */
-public interface StormPipelineOptions extends PipelineOptions, ApplicationNameOptions, StreamingOptions {
-
-    @Description("Indicate if the topology is running on local machine or distributed cluster")
-    @Default.Boolean(false)
-    Boolean getLocalMode();
-    void setLocalMode(Boolean isLocal);
-
-    @Description("Executing time(sec) of topology on local mode. Default is 1min.")
-    @Default.Long(60)
-    Long getLocalModeExecuteTime();
-    void setLocalModeExecuteTime(Long time);
-
-    @Description("Worker number of topology")
-    @Default.Integer(1)
-    Integer getWorkerNumber();
-    void setWorkerNumber(Integer number);
-
-    @Description("Global parallelism number of a component")
-    @Default.Integer(1)
-    Integer getParallelismNumber();
-    void setParallelismNumber(Integer number);
-
-    @Description("System topology config of JStorm")
-    @Default.InstanceFactory(DefaultMapValueFactory.class)
-    Map getTopologyConfig();
-    void setTopologyConfig(Map conf);
-
-    @Description("Indicate if it is an exactly once topology")
-    @Default.Boolean(false)
-    Boolean getExactlyOnceTopology();
-    void setExactlyOnceTopology(Boolean isExactlyOnce);
-
-    @Description("Parallelism number of a specified composite PTransform")
-    @Default.InstanceFactory(DefaultMapValueFactory.class)
-    Map getParallelismNumMap();
-    void setParallelismNumMap(Map parallelismNumMap);
-
-    class DefaultMapValueFactory implements DefaultValueFactory<Map> {
-        @Override
-        public Map create(PipelineOptions pipelineOptions) {
-            return Maps.newHashMap();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRegistrar.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRegistrar.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRegistrar.java
deleted file mode 100644
index 4ef4d01..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRegistrar.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam;
-
-import com.google.auto.service.AutoService;
-import com.google.common.collect.ImmutableList;
-
-import org.apache.beam.sdk.PipelineRunner;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.options.PipelineOptionsRegistrar;
-import org.apache.beam.sdk.runners.PipelineRunnerRegistrar;
-
-public class StormRegistrar {
-    private StormRegistrar() {
-    }
-
-    @AutoService(PipelineRunnerRegistrar.class)
-    public static class Runner implements PipelineRunnerRegistrar {
-        @Override
-        public Iterable<Class<? extends PipelineRunner<?>>> getPipelineRunners() {
-            return ImmutableList.<Class<? extends PipelineRunner<?>>> of(StormRunner.class);
-        }
-    }
-
-    @AutoService(PipelineOptionsRegistrar.class)
-    public static class Options implements PipelineOptionsRegistrar {
-        @Override
-        public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
-            return ImmutableList.<Class<? extends PipelineOptions>> of(StormPipelineOptions.class);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRunner.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRunner.java
deleted file mode 100644
index e5db461..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/StormRunner.java
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import backtype.storm.Config;
-import backtype.storm.LocalCluster;
-import backtype.storm.StormSubmitter;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-
-import com.alibaba.jstorm.beam.serialization.*;
-import com.alibaba.jstorm.beam.translation.StormPipelineTranslator;
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.AbstractComponent;
-import com.alibaba.jstorm.beam.translation.runtime.AdaptorBasicBolt;
-import com.alibaba.jstorm.beam.translation.runtime.AdaptorBasicSpout;
-import com.alibaba.jstorm.beam.translation.runtime.ExecutorsBolt;
-import com.alibaba.jstorm.beam.translation.runtime.TxExecutorsBolt;
-import com.alibaba.jstorm.beam.translation.runtime.TxUnboundedSourceSpout;
-import com.alibaba.jstorm.beam.translation.runtime.UnboundedSourceSpout;
-import com.alibaba.jstorm.beam.translation.translator.Stream;
-import com.alibaba.jstorm.beam.translation.util.CommonInstance;
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import com.alibaba.jstorm.client.ConfigExtension;
-import com.alibaba.jstorm.cluster.StormConfig;
-import com.alibaba.jstorm.transactional.TransactionTopologyBuilder;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-import java.io.IOException;
-import java.util.HashMap;
-
-import java.util.Map;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.PipelineResult;
-import org.apache.beam.sdk.PipelineRunner;
-import org.apache.beam.sdk.metrics.MetricResults;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.options.PipelineOptionsValidator;
-import org.joda.time.Duration;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Main entry point into the Storm Runner.
- * 
- * After reading the user defined pipeline, Beam will invoke the run() method with a representation of the pipeline.
- */
-public class StormRunner extends PipelineRunner<StormRunner.StormPipelineResult> {
-    private static final Logger LOG = LoggerFactory.getLogger(StormRunner.class);
-
-    private StormPipelineOptions options;
-
-    public StormRunner(StormPipelineOptions options) {
-        this.options = options;
-    }
-
-    public static StormRunner fromOptions(PipelineOptions options) {
-        StormPipelineOptions pipelineOptions = PipelineOptionsValidator.validate(StormPipelineOptions.class, options);
-        return new StormRunner(pipelineOptions);
-    }
-
-    /**
-     * convert pipeline options to storm configuration format
-     * @param options
-     * @return
-     */
-    private Config convertPipelineOptionsToConfig(StormPipelineOptions options) {
-        Config config = new Config();
-        if (options.getLocalMode())
-            config.put(Config.STORM_CLUSTER_MODE, "local");
-        else
-            config.put(Config.STORM_CLUSTER_MODE, "distributed");
-        
-        Config.setNumWorkers(config, options.getWorkerNumber());
-
-        config.putAll(options.getTopologyConfig());
-
-        // Setup config for runtime env
-        config.put("worker.external", "beam");
-        config.put("topology.acker.executors", 0);
-
-        UnmodifiableCollectionsSerializer.registerSerializers(config);
-        // register classes of guava utils, ImmutableList, ImmutableSet, ImmutableMap
-        ImmutableListSerializer.registerSerializers(config);
-        SdkRepackImmuListSerializer.registerSerializers(config);
-        ImmutableSetSerializer.registerSerializers(config);
-        SdkRepackImmuSetSerializer.registerSerializers(config);
-        ImmutableMapSerializer.registerSerializers(config);
-
-        config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
-        return config;
-    }
-
-    @Override
-    public StormPipelineResult run(Pipeline pipeline) {
-        LOG.info("Running pipeline...");
-        TranslationContext context = new TranslationContext(this.options);
-        StormPipelineTranslator transformer = new StormPipelineTranslator(context);
-        transformer.translate(pipeline);
-        LOG.info("UserGraphContext=\n{}", context.getUserGraphContext());
-        LOG.info("ExecutionGraphContext=\n{}", context.getExecutionGraphContext());
-
-        for (Stream stream : context.getExecutionGraphContext().getStreams()) {
-            LOG.info(stream.getProducer().getComponentId() + " --> " + stream.getConsumer().getComponentId());
-        }
-
-        String topologyName = options.getJobName();
-        Config config = convertPipelineOptionsToConfig(options);
-
-        return runTopology(
-                topologyName,
-                getTopology(options, context.getExecutionGraphContext()),
-                config);
-    }
-
-    private StormPipelineResult runTopology(String topologyName, StormTopology topology, Config config) {
-        try {
-            if (StormConfig.local_mode(config)) {
-                LocalCluster localCluster = LocalCluster.getInstance();
-                localCluster.submitTopology(topologyName, config, topology);
-                return new LocalStormPipelineResult(
-                        topologyName, config, localCluster, options.getLocalModeExecuteTime());
-            } else {
-                StormSubmitter.submitTopology(topologyName, config, topology);
-                return null;
-            }
-        } catch (Exception e) {
-            LOG.warn("Fail to submit topology", e);
-            throw new RuntimeException("Fail to submit topology", e);
-        }
-    }
-
-    public static abstract class StormPipelineResult implements PipelineResult {
-
-        private final String topologyName;
-        private final Config config;
-
-        StormPipelineResult(String topologyName, Config config) {
-            this.config = checkNotNull(config, "config");
-            this.topologyName = checkNotNull(topologyName, "topologyName");
-        }
-
-        public State getState() {
-            return null;
-        }
-
-        public Config getConfig() {
-            return config;
-        }
-
-        public String getTopologyName() {
-            return topologyName;
-        }
-    }
-
-    public static class LocalStormPipelineResult extends StormPipelineResult {
-
-        private LocalCluster localCluster;
-        private long localModeExecuteTimeSecs;
-
-        LocalStormPipelineResult(
-                String topologyName,
-                Config config,
-                LocalCluster localCluster,
-                long localModeExecuteTimeSecs) {
-            super(topologyName, config);
-            this.localCluster = checkNotNull(localCluster, "localCluster");
-        }
-
-        @Override
-        public State cancel() throws IOException {
-            //localCluster.deactivate(getTopologyName());
-            localCluster.killTopology(getTopologyName());
-            localCluster.shutdown();
-            JStormUtils.sleepMs(1000);
-            return State.CANCELLED;
-        }
-
-        @Override
-        public State waitUntilFinish(Duration duration) {
-            return waitUntilFinish();
-        }
-
-        @Override
-        public State waitUntilFinish() {
-            JStormUtils.sleepMs(localModeExecuteTimeSecs * 1000);
-            try {
-                return cancel();
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-
-        @Override
-        public MetricResults metrics() {
-            return null;
-        }
-    }
-
-    private AbstractComponent getComponent(String id, TranslationContext.ExecutionGraphContext context) {
-        AbstractComponent component = null;
-        AdaptorBasicSpout spout = context.getSpout(id);
-        if (spout != null) {
-            component = spout;
-        } else {
-            AdaptorBasicBolt bolt = context.getBolt(id);
-            if (bolt != null)
-                component = bolt;
-        }
-
-        return component;
-    }
-
-    private StormTopology getTopology(StormPipelineOptions options, TranslationContext.ExecutionGraphContext context) {
-        boolean isExactlyOnce = options.getExactlyOnceTopology();
-        TopologyBuilder builder = isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
-
-        int parallelismNumber = options.getParallelismNumber();
-        Map<String, AdaptorBasicSpout> spouts = context.getSpouts();
-        for (String id : spouts.keySet()) {
-            IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
-            builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));
-        }
-
-        HashMap<String, BoltDeclarer> declarers = new HashMap<>();
-        Iterable<Stream> streams = context.getStreams();
-        LOG.info("streams=" + streams);
-        for (Stream stream : streams) {
-            String destBoltId = stream.getConsumer().getComponentId();
-            IRichBolt bolt = getBolt(isExactlyOnce, context.getBolt(destBoltId));
-            BoltDeclarer declarer = declarers.get(destBoltId);
-            if (declarer == null) {
-                declarer = builder.setBolt(destBoltId, bolt,
-                        getParallelismNum(context.getBolt(destBoltId), parallelismNumber));
-                declarers.put(destBoltId, declarer);
-            }
-
-            Stream.Grouping grouping = stream.getConsumer().getGrouping();
-            String streamId = stream.getProducer().getStreamId();
-            String srcBoltId = stream.getProducer().getComponentId();
-
-            // add stream output declare for "from" component
-            AbstractComponent component = getComponent(srcBoltId, context);
-            if (grouping.getType().equals(Stream.Grouping.Type.FIELDS))
-                component.addKVOutputField(streamId);
-            else
-                component.addOutputField(streamId);
-
-            // "to" component declares grouping to "from" component 
-            switch (grouping.getType()) {
-            case SHUFFLE:
-                declarer.shuffleGrouping(srcBoltId, streamId);
-                break;
-            case FIELDS:
-                declarer.fieldsGrouping(srcBoltId, streamId, new Fields(grouping.getFields()));
-                break;
-            case ALL:
-                declarer.allGrouping(srcBoltId, streamId);
-                break;
-            case DIRECT:
-                declarer.directGrouping(srcBoltId, streamId);
-                break;
-            case GLOBAL:
-                declarer.globalGrouping(srcBoltId, streamId);
-                break;
-            case LOCAL_OR_SHUFFLE:
-                declarer.localOrShuffleGrouping(srcBoltId, streamId);
-                break;
-            case NONE:
-                declarer.noneGrouping(srcBoltId, streamId);
-                break;
-            default:
-                throw new UnsupportedOperationException("unsupported grouping type: " + grouping);
-            }
-
-            // Subscribe grouping of water mark stream
-            component.addOutputField(CommonInstance.BEAM_WATERMARK_STREAM_ID);
-            declarer.allGrouping(srcBoltId, CommonInstance.BEAM_WATERMARK_STREAM_ID);
-        }
-
-        if (isExactlyOnce) {
-            ((TransactionTopologyBuilder) builder).enableHdfs();
-        }
-        return builder.createTopology();
-    }
-
-    private IRichSpout getSpout(boolean isExactlyOnce, IRichSpout spout) {
-        IRichSpout ret = null;
-        if (isExactlyOnce) {
-            if (spout instanceof UnboundedSourceSpout) {
-                ret = new TxUnboundedSourceSpout((UnboundedSourceSpout) spout);
-            } else {
-                String error = String.format("The specified type(%s) is not supported in exactly once mode yet!", spout.getClass().toString());
-                throw new RuntimeException(error);
-            }
-        } else {
-            ret = spout;
-        }
-        return ret;
-    }
-
-    private IRichBolt getBolt(boolean isExactlyOnce, ExecutorsBolt bolt) {
-        return isExactlyOnce ? new TxExecutorsBolt(bolt) : bolt;
-    }
-
-    /**
-     * Calculate the final parallelism number according to the configured number and global number.
-     * @param component
-     * @param globalParallelismNum
-     * @return final parallelism number for the specified component
-     */
-    private int getParallelismNum(AbstractComponent component, int globalParallelismNum) {
-        int configParallelismNum = component.getParallelismNum();
-        return configParallelismNum > 0 ? configParallelismNum : globalParallelismNum;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/TestJStormRunner.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/TestJStormRunner.java
deleted file mode 100644
index cea79a1..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/TestJStormRunner.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package com.alibaba.jstorm.beam;
-
-import avro.shaded.com.google.common.collect.Maps;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.client.ConfigExtension;
-import com.alibaba.jstorm.common.metric.AsmMetric;
-import com.alibaba.jstorm.metric.*;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.google.common.base.Optional;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.PipelineRunner;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.testing.PAssert;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Test JStorm runner.
- */
-public class TestJStormRunner extends PipelineRunner<StormRunner.StormPipelineResult> {
-
-    private static final Logger LOG = LoggerFactory.getLogger(TestJStormRunner.class);
-
-    public static TestJStormRunner fromOptions(PipelineOptions options) {
-        return new TestJStormRunner(options.as(StormPipelineOptions.class));
-    }
-
-    private final StormRunner stormRunner;
-    private final StormPipelineOptions options;
-
-    private TestJStormRunner(StormPipelineOptions options) {
-        this.options = options;
-        Map conf = Maps.newHashMap();
-        //conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
-        options.setTopologyConfig(conf);
-        options.setLocalMode(true);
-        stormRunner = StormRunner.fromOptions(checkNotNull(options, "options"));
-    }
-
-    @Override
-    public StormRunner.StormPipelineResult run(Pipeline pipeline) {
-        StormRunner.StormPipelineResult result = stormRunner.run(pipeline);
-
-        try {
-            int numberOfAssertions = PAssert.countAsserts(pipeline);
-
-            LOG.info("Running JStorm job {} with {} expected assertions.", result.getTopologyName(), numberOfAssertions);
-            if(numberOfAssertions == 0) {
-                // If assert number is zero, wait 5 sec
-                JStormUtils.sleepMs(5000);
-                return result;
-            } else {
-                for (int i = 0; i < 40; ++i) {
-                    Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
-                    if (success.isPresent() && success.get()) {
-                        return result;
-                    } else if (success.isPresent() && !success.get()) {
-                        throw new AssertionError("Failed assertion checks.");
-                    } else {
-                        JStormUtils.sleepMs(500);
-                    }
-                }
-                LOG.info("Assertion checks timed out.");
-                throw new AssertionError("Assertion checks timed out.");
-            }
-        } finally {
-            clearPAssertCount();
-            cancel(result);
-        }
-    }
-
-    private Optional<Boolean> checkForPAssertSuccess(int expectedNumberOfAssertions) {
-        int successes = 0;
-        for (AsmMetric metric : JStormMetrics.search(PAssert.SUCCESS_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
-            successes += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
-        }
-        int failures = 0;
-        for (AsmMetric metric : JStormMetrics.search(PAssert.FAILURE_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
-            failures += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
-        }
-
-        if (failures > 0) {
-            LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                    successes, failures, expectedNumberOfAssertions);
-            return Optional.of(false);
-        } else if (successes >= expectedNumberOfAssertions) {
-            LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                    successes, failures, expectedNumberOfAssertions);
-            return Optional.of(true);
-        }
-
-        LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                successes, failures, expectedNumberOfAssertions);
-        return Optional.absent();
-    }
-
-    private void clearPAssertCount() {
-        String topologyName = options.getJobName();
-        AsmMetricRegistry taskMetrics = JStormMetrics.getTaskMetrics();
-        Iterator<Map.Entry<String, AsmMetric>> itr = taskMetrics.getMetrics().entrySet().iterator();
-        while (itr.hasNext()) {
-            Map.Entry<String, AsmMetric> metric = itr.next();
-            if (metric.getKey().contains(topologyName)) {
-                itr.remove();
-            }
-        }
-    }
-
-    private void cancel(StormRunner.StormPipelineResult result) {
-        try {
-            result.cancel();
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to cancel.", e);
-}
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableListSerializer.java
deleted file mode 100644
index 5eabb5f..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableListSerializer.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.*;
-
-public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
-
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
-
-    public ImmutableListSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
-    }
-
-    @Override
-    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-        final int size = input.readInt(true);
-        final Object[] list = new Object[size];
-        for (int i = 0; i < size; ++i) {
-            list[i] = kryo.readClassAndObject(input);
-        }
-        return ImmutableList.copyOf(list);
-    }
-
-    /**
-     * Creates a new {@link ImmutableListSerializer} and registers its serializer
-     * for the several ImmutableList related classes.
-     */
-    public static void registerSerializers(Config config) {
-
-        // ImmutableList (abstract class)
-        //  +- RegularImmutableList
-        //  |   RegularImmutableList
-        //  +- SingletonImmutableList
-        //  |   Optimized for List with only 1 element.
-        //  +- SubList
-        //  |   Representation for part of ImmutableList
-        //  +- ReverseImmutableList
-        //  |   For iterating in reverse order
-        //  +- StringAsImmutableList
-        //  |   Used by Lists#charactersOf
-        //  +- Values (ImmutableTable values)
-        //      Used by return value of #values() when there are multiple cells
-
-        config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.class), ImmutableListSerializer.class);
-
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
-
-        config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1).getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1,2,3).subList(1, 2).getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of().reverse().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().reverse().getClass()), ImmutableListSerializer.class);
-
-        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(Lists.charactersOf("KryoRocks").getClass()), ImmutableListSerializer.class);
-
-        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
-        baseTable.put(1, 2, 3);
-        baseTable.put(4, 5, 6);
-        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-        config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(table.values().getClass()), ImmutableListSerializer.class);
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableMapSerializer.java
deleted file mode 100644
index 9980292..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableMapSerializer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.Map;
-
-public class ImmutableMapSerializer extends Serializer<ImmutableMap<Object, ? extends Object>> {
-
-    private static final boolean DOES_NOT_ACCEPT_NULL = true;
-    private static final boolean IMMUTABLE = true;
-
-    public ImmutableMapSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
-        kryo.writeObject(output, Maps.newHashMap(immutableMap));
-    }
-
-    @Override
-    public ImmutableMap<Object, Object> read(Kryo kryo, Input input, Class<ImmutableMap<Object, ? extends Object>> type) {
-        Map map = kryo.readObject(input, HashMap.class);
-        return ImmutableMap.copyOf(map);
-    }
-
-    /**
-     * Creates a new {@link ImmutableMapSerializer} and registers its serializer
-     * for the several ImmutableMap related classes.
-     */
-    public static void registerSerializers(Config config) {
-
-        config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
-        config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
-
-        Object o1 = new Object();
-        Object o2 = new Object();
-
-        config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
-        config.registerSerialization(ImmutableMap.of(o1, o1, o2, o2).getClass(), ImmutableMapSerializer.class);
-        Map<DummyEnum,Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
-        for (DummyEnum e : DummyEnum.values()) {
-            enumMap.put(e, o1);
-        }
-
-        config.registerSerialization(ImmutableMap.copyOf(enumMap).getClass(), ImmutableMapSerializer.class);
-    }
-
-    private enum DummyEnum {
-        VALUE1,
-        VALUE2
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableSetSerializer.java
deleted file mode 100644
index a0d1627..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/ImmutableSetSerializer.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
-
-public class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
-
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
-
-    public ImmutableSetSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
-    }
-
-    @Override
-    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-        final int size = input.readInt(true);
-        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-        for (int i = 0; i < size; ++i) {
-            builder.add(kryo.readClassAndObject(input));
-        }
-        return builder.build();
-    }
-
-    /**
-     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-     * for the several ImmutableSet related classes.
-     */
-    public static void registerSerializers(Config config) {
-
-        // ImmutableList (abstract class)
-        //  +- EmptyImmutableSet
-        //  |   EmptyImmutableSet
-        //  +- SingletonImmutableSet
-        //  |   Optimized for Set with only 1 element.
-        //  +- RegularImmutableSet
-        //  |   RegularImmutableList
-        //  +- EnumImmutableSet
-        //  |   EnumImmutableSet
-
-        config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
-
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
-
-        config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), ImmutableSetSerializer.class);
-
-        config.registerSerialization(
-                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), ImmutableSetSerializer.class);
-    }
-
-    private enum SomeEnum {
-        A, B, C
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/KvStoreIterableSerializer.java
deleted file mode 100644
index 1207763..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/KvStoreIterableSerializer.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.Lists;
-
-import java.util.Iterator;
-import java.util.List;
-
-public class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
-
-    public KvStoreIterableSerializer() {
-
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
-        List<Object> values = Lists.newArrayList(object);
-        output.writeInt(values.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
-    }
-
-    @Override
-    public KvStoreIterable<Object> read(Kryo kryo, Input input, Class<KvStoreIterable<Object>> type) {
-        final int size = input.readInt(true);
-        List<Object> values = Lists.newArrayList();
-        for (int i = 0; i < size; ++i) {
-            values.add(kryo.readClassAndObject(input));
-        }
-
-        return new KvStoreIterable<Object>() {
-            Iterable<Object> values;
-
-            @Override
-            public Iterator<Object> iterator() {
-                return values.iterator();
-            }
-
-            public KvStoreIterable init(Iterable<Object> values) {
-                this.values = values;
-                return this;
-            }
-
-            @Override
-            public String toString() {
-                return values.toString();
-            }
-        }.init(values);
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuListSerializer.java
deleted file mode 100644
index 2fc2067..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuListSerializer.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
-
-public class SdkRepackImmuListSerializer extends Serializer<ImmutableList<Object>> {
-
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
-
-    public SdkRepackImmuListSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
-    }
-
-    @Override
-    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-        final int size = input.readInt(true);
-        final Object[] list = new Object[size];
-        for (int i = 0; i < size; ++i) {
-            list[i] = kryo.readClassAndObject(input);
-        }
-        return ImmutableList.copyOf(list);
-    }
-
-    /**
-     * Creates a new {@link ImmutableListSerializer} and registers its serializer
-     * for the several ImmutableList related classes.
-     */
-    public static void registerSerializers(Config config) {
-
-        // ImmutableList (abstract class)
-        //  +- RegularImmutableList
-        //  |   RegularImmutableList
-        //  +- SingletonImmutableList
-        //  |   Optimized for List with only 1 element.
-        //  +- SubList
-        //  |   Representation for part of ImmutableList
-        //  +- ReverseImmutableList
-        //  |   For iterating in reverse order
-        //  +- StringAsImmutableList
-        //  |   Used by Lists#charactersOf
-        //  +- Values (ImmutableTable values)
-        //      Used by return value of #values() when there are multiple cells
-
-        config.registerSerialization(ImmutableList.class, SdkRepackImmuListSerializer.class);
-
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
-
-        config.registerSerialization(ImmutableList.of().getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1).getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of().reverse().getClass(), SdkRepackImmuListSerializer.class);
-
-        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), SdkRepackImmuListSerializer.class);
-
-        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
-        baseTable.put(1, 2, 3);
-        baseTable.put(4, 5, 6);
-        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-        config.registerSerialization(table.values().getClass(), SdkRepackImmuListSerializer.class);
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuSetSerializer.java
deleted file mode 100644
index 0cbcad9..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/SdkRepackImmuSetSerializer.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
-
-public class SdkRepackImmuSetSerializer extends Serializer<ImmutableSet<Object>> {
-
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
-
-    public SdkRepackImmuSetSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
-
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
-    }
-
-    @Override
-    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-        final int size = input.readInt(true);
-        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-        for (int i = 0; i < size; ++i) {
-            builder.add(kryo.readClassAndObject(input));
-        }
-        return builder.build();
-    }
-
-    /**
-     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-     * for the several ImmutableSet related classes.
-     */
-    public static void registerSerializers(Config config) {
-
-        // ImmutableList (abstract class)
-        //  +- EmptyImmutableSet
-        //  |   EmptyImmutableSet
-        //  +- SingletonImmutableSet
-        //  |   Optimized for Set with only 1 element.
-        //  +- RegularImmutableSet
-        //  |   RegularImmutableList
-        //  +- EnumImmutableSet
-        //  |   EnumImmutableSet
-
-        config.registerSerialization(ImmutableSet.class, SdkRepackImmuSetSerializer.class);
-
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
-
-        config.registerSerialization(ImmutableSet.of().getClass(), SdkRepackImmuSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1).getClass(), SdkRepackImmuSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), SdkRepackImmuSetSerializer.class);
-
-        config.registerSerialization(
-                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), SdkRepackImmuSetSerializer.class);
-    }
-
-    private enum SomeEnum {
-        A, B, C
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/UnmodifiableCollectionsSerializer.java
deleted file mode 100644
index 1eb7146..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/serialization/UnmodifiableCollectionsSerializer.java
+++ /dev/null
@@ -1,159 +0,0 @@
-package com.alibaba.jstorm.beam.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-
-import java.lang.reflect.Field;
-import java.util.*;
-
-public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
-
-    private static final Field SOURCE_COLLECTION_FIELD;
-    private static final Field SOURCE_MAP_FIELD;
-
-    static {
-        try {
-            SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection" )
-                    .getDeclaredField( "c" );
-            SOURCE_COLLECTION_FIELD.setAccessible( true );
-
-
-            SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap" )
-                    .getDeclaredField( "m" );
-            SOURCE_MAP_FIELD.setAccessible( true );
-        } catch ( final Exception e ) {
-            throw new RuntimeException( "Could not access source collection" +
-                    " field in java.util.Collections$UnmodifiableCollection.", e );
-        }
-    }
-
-    @Override
-    public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
-        final int ordinal = input.readInt( true );
-        final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.values()[ordinal];
-        final Object sourceCollection = kryo.readClassAndObject( input );
-        return unmodifiableCollection.create( sourceCollection );
-    }
-
-    @Override
-    public void write(final Kryo kryo, final Output output, final Object object) {
-        try {
-            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( object.getClass() );
-            // the ordinal could be replaced by s.th. else (e.g. a explicitely managed "id")
-            output.writeInt( unmodifiableCollection.ordinal(), true );
-            kryo.writeClassAndObject( output, unmodifiableCollection.sourceCollectionField.get( object ) );
-        } catch ( final RuntimeException e ) {
-            // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
-            // handles SerializationException specifically (resizing the buffer)...
-            throw e;
-        } catch ( final Exception e ) {
-            throw new RuntimeException( e );
-        }
-    }
-
-    @Override
-    public Object copy(Kryo kryo, Object original) {
-        try {
-            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( original.getClass() );
-            Object sourceCollectionCopy = kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
-            return unmodifiableCollection.create( sourceCollectionCopy );
-        } catch ( final RuntimeException e ) {
-            // Don't eat and wrap RuntimeExceptions
-            throw e;
-        } catch ( final Exception e ) {
-            throw new RuntimeException( e );
-        }
-    }
-
-    private static enum UnmodifiableCollection {
-        COLLECTION( Collections.unmodifiableCollection( Arrays.asList( "" ) ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableCollection( (Collection<?>) sourceCollection );
-            }
-        },
-        RANDOM_ACCESS_LIST( Collections.unmodifiableList( new ArrayList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableList( (List<?>) sourceCollection );
-            }
-        },
-        LIST( Collections.unmodifiableList( new LinkedList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableList( (List<?>) sourceCollection );
-            }
-        },
-        SET( Collections.unmodifiableSet( new HashSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSet( (Set<?>) sourceCollection );
-            }
-        },
-        SORTED_SET( Collections.unmodifiableSortedSet( new TreeSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSortedSet( (SortedSet<?>) sourceCollection );
-            }
-        },
-        MAP( Collections.unmodifiableMap( new HashMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
-
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableMap( (Map<?, ?>) sourceCollection );
-            }
-
-        },
-        SORTED_MAP( Collections.unmodifiableSortedMap( new TreeMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSortedMap( (SortedMap<?, ?>) sourceCollection );
-            }
-        };
-
-        private final Class<?> type;
-        private final Field sourceCollectionField;
-
-        private UnmodifiableCollection( final Class<?> type, final Field sourceCollectionField ) {
-            this.type = type;
-            this.sourceCollectionField = sourceCollectionField;
-        }
-
-        /**
-         * @param sourceCollection
-         */
-        public abstract Object create( Object sourceCollection );
-
-        static UnmodifiableCollection valueOfType(final Class<?> type ) {
-            for( final UnmodifiableCollection item : values() ) {
-                if ( item.type.equals( type ) ) {
-                    return item;
-                }
-            }
-            throw new IllegalArgumentException( "The type " + type + " is not supported." );
-        }
-
-    }
-
-    /**
-     * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
-     * for the several unmodifiable Collections that can be created via {@link Collections},
-     * including {@link Map}s.
-     *
-     * @see Collections#unmodifiableCollection(Collection)
-     * @see Collections#unmodifiableList(List)
-     * @see Collections#unmodifiableSet(Set)
-     * @see Collections#unmodifiableSortedSet(SortedSet)
-     * @see Collections#unmodifiableMap(Map)
-     * @see Collections#unmodifiableSortedMap(SortedMap)
-     */
-    public static void registerSerializers( Config config ) {
-        UnmodifiableCollection.values();
-        for ( final UnmodifiableCollection item : UnmodifiableCollection.values() ) {
-            config.registerSerialization( item.type, UnmodifiableCollectionsSerializer.class );
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/StormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/StormPipelineTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/StormPipelineTranslator.java
deleted file mode 100644
index 7eae1da..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/StormPipelineTranslator.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation;
-
-import com.alibaba.jstorm.beam.translation.translator.ViewTranslator;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
-import org.apache.beam.runners.core.construction.PTransformMatchers;
-import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.runners.PTransformMatcher;
-import org.apache.beam.sdk.runners.PTransformOverride;
-import org.apache.beam.sdk.runners.PTransformOverrideFactory;
-import org.apache.beam.sdk.runners.TransformHierarchy;
-import org.apache.beam.sdk.runners.AppliedPTransform;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.util.InstanceBuilder;
-import org.apache.beam.sdk.values.PValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.beam.translation.translator.TransformTranslator;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Pipleline translator of Storm
- */
-public class StormPipelineTranslator extends Pipeline.PipelineVisitor.Defaults {
-    private static final Logger LOG = LoggerFactory.getLogger(StormPipelineTranslator.class);
-    private TranslationContext context;
-    private int depth = 0;
-
-    public StormPipelineTranslator(TranslationContext context) {
-        this.context = context;
-    }
-
-    public void translate(Pipeline pipeline) {
-        List<PTransformOverride> transformOverrides =
-                ImmutableList.<PTransformOverride>builder()
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
-                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
-                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
-                               new ReflectiveOneToOneOverrideFactory((ViewTranslator.CombineGloballyAsSingletonView.class))))
-                        .build();
-        pipeline.replaceAll(transformOverrides);
-        pipeline.traverseTopologically(this);
-    }
-
-    @Override
-    public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
-        this.depth++;
-
-        // check if current composite transforms need to be translated. 
-        // If not, all sub transforms will be translated in visitPrimitiveTransform.
-        PTransform<?, ?> transform = node.getTransform();
-        if (transform != null) {
-            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-
-            if (translator != null && applyCanTranslate(transform, node, translator)) {
-                applyStreamingTransform(transform, node, translator);
-                LOG.info(genSpaces(this.depth) + "translated-" + node);
-                return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
-            }
-        }
-        return CompositeBehavior.ENTER_TRANSFORM;
-    }
-
-    public void leaveCompositeTransform(TransformHierarchy.Node node) {
-        this.depth--;
-        LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
-    }
-
-    public void visitPrimitiveTransform(TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
-
-        if (!node.isRootNode()) {
-            PTransform<?, ?> transform = node.getTransform();
-            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-            if (translator == null || !applyCanTranslate(transform, node, translator)) {
-                LOG.info(node.getTransform().getClass().toString());
-                throw new UnsupportedOperationException("The transform " + transform + " is currently not supported.");
-            }
-            applyStreamingTransform(transform, node, translator);
-        }
-    }
-
-    public void visitValue(PValue value, TransformHierarchy.Node node) {
-        LOG.info(genSpaces(this.depth) + "visiting value {}", value);
-    }
-
-    private <T extends PTransform<?, ?>> void applyStreamingTransform(PTransform<?, ?> transform, TransformHierarchy.Node node,
-            TransformTranslator<?> translator) {
-        @SuppressWarnings("unchecked")
-        T typedTransform = (T) transform;
-        @SuppressWarnings("unchecked")
-        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-        typedTranslator.translateNode(typedTransform, context);
-
-        // Maintain PValue to TupleTag map for side inputs translation.
-        context.getUserGraphContext().recordOutputTaggedPValue();
-    }
-
-    private <T extends PTransform<?, ?>> boolean applyCanTranslate(PTransform<?, ?> transform, TransformHierarchy.Node node, TransformTranslator<?> translator) {
-        @SuppressWarnings("unchecked")
-        T typedTransform = (T) transform;
-        @SuppressWarnings("unchecked")
-        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-
-        return typedTranslator.canTranslate(typedTransform, context);
-    }
-
-    /**
-     * Utility formatting method.
-     * 
-     * @param n number of spaces to generate
-     * @return String with "|" followed by n spaces
-     */
-    protected static String genSpaces(int n) {
-        StringBuilder builder = new StringBuilder();
-        for (int i = 0; i < n; i++) {
-            builder.append("|   ");
-        }
-        return builder.toString();
-    }
-
-    private static class ReflectiveOneToOneOverrideFactory<
-            InputT extends PValue,
-            OutputT extends PValue,
-            TransformT extends PTransform<InputT, OutputT>>
-            extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
-        private final Class<PTransform<InputT, OutputT>> replacement;
-
-        private ReflectiveOneToOneOverrideFactory(
-                Class<PTransform<InputT, OutputT>> replacement) {
-            this.replacement = replacement;
-        }
-
-        @Override
-        public PTransformReplacement<InputT, OutputT> getReplacementTransform(AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
-            PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
-            PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
-                    .withArg((Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(), originalPTransform)
-                    .build();
-            InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
-            return PTransformReplacement.of(inputT, replacedPTransform);
-        }
-    }
-}


[47/53] [abbrv] beam git commit: jstorm-runner: 1. Generate execution DAG for runtime 2. Restructure Kryo serializers

Posted by pe...@apache.org.
jstorm-runner:
1. Generate execution DAG for runtime
2. Restructure Kryo serializers


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/6078cbc6
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/6078cbc6
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/6078cbc6

Branch: refs/heads/jstorm-runner
Commit: 6078cbc6bd5ca6e48e237c652c532b189acef2b7
Parents: 240f61b
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Aug 9 16:48:42 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml                          |   8 +-
 .../beam/runners/jstorm/JStormRunner.java       | 169 +++++++++++--
 .../BeamSdkRepackUtilsSerializer.java           | 253 +++++++++++++++++++
 .../serialization/BeamUtilsSerializer.java      | 114 +++++++++
 .../serialization/CollectionsSerializer.java    |  60 -----
 .../serialization/GuavaUtilsSerializer.java     | 252 ++++++++++++++++++
 .../serialization/ImmutableListSerializer.java  | 106 --------
 .../serialization/ImmutableMapSerializer.java   |  87 -------
 .../serialization/ImmutableSetSerializer.java   |  92 -------
 .../serialization/JStormUtilsSerializer.java    | 126 +++++++++
 .../serialization/JavaUtilsSerializer.java      | 236 +++++++++++++++++
 .../KvStoreIterableSerializer.java              |  74 ------
 .../SdkRepackImmuListSerializer.java            | 107 --------
 .../SdkRepackImmuSetSerializer.java             |  95 -------
 .../SdkRepackImmutableMapSerializer.java        |  90 -------
 .../UnmodifiableCollectionsSerializer.java      | 201 ---------------
 .../translation/BoundedSourceTranslator.java    |   1 +
 .../jstorm/translation/DoFnExecutor.java        |   2 +-
 .../runners/jstorm/translation/Executor.java    |   2 +-
 .../jstorm/translation/ExecutorsBolt.java       |  35 ++-
 .../jstorm/translation/FlattenTranslator.java   |   1 +
 .../translation/JStormStateInternals.java       |  24 +-
 .../jstorm/translation/TranslationContext.java  |  19 +-
 .../translation/UnboundedSourceSpout.java       |  12 +
 .../translation/UnboundedSourceTranslator.java  |   1 +
 25 files changed, 1203 insertions(+), 964 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index 79634e9..75387ef 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -53,9 +53,6 @@
                   <goal>test</goal>
                 </goals>
                 <configuration>
-                  <!--<includes>
-                    <include>org.apache.beam.sdk.transforms.CombineTest.java</include>
-                  </includes>-->
                   <groups>
                     org.apache.beam.sdk.testing.ValidatesRunner
                   </groups>
@@ -144,6 +141,11 @@
         <groupId>com.google.auto.value</groupId>
         <artifactId>auto-value</artifactId>
     </dependency>
+    <dependency>
+        <groupId>com.googlecode.json-simple</groupId>
+        <artifactId>json-simple</artifactId>
+        <version>1.1</version>
+    </dependency>
 
     <!-- Depend on test jar to scan for ValidatesRunner tests -->
     <dependency>

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 56db1c6..47de42c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -26,22 +26,25 @@ import backtype.storm.topology.IRichBolt;
 import backtype.storm.topology.IRichSpout;
 import backtype.storm.topology.TopologyBuilder;
 import backtype.storm.tuple.Fields;
-import com.alibaba.jstorm.cache.KvStoreIterable;
+import com.alibaba.jstorm.client.ConfigExtension;
 import com.alibaba.jstorm.cluster.StormConfig;
 import com.alibaba.jstorm.transactional.TransactionTopologyBuilder;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.beam.runners.jstorm.serialization.CollectionsSerializer;
-import org.apache.beam.runners.jstorm.serialization.ImmutableListSerializer;
-import org.apache.beam.runners.jstorm.serialization.ImmutableMapSerializer;
-import org.apache.beam.runners.jstorm.serialization.ImmutableSetSerializer;
-import org.apache.beam.runners.jstorm.serialization.KvStoreIterableSerializer;
-import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuListSerializer;
-import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuSetSerializer;
-import org.apache.beam.runners.jstorm.serialization.SdkRepackImmutableMapSerializer;
-import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSerializer;
+
+import org.apache.beam.runners.jstorm.serialization.BeamSdkRepackUtilsSerializer;
+import org.apache.beam.runners.jstorm.serialization.BeamUtilsSerializer;
+import org.apache.beam.runners.jstorm.serialization.GuavaUtilsSerializer;
+import org.apache.beam.runners.jstorm.serialization.JStormUtilsSerializer;
+import org.apache.beam.runners.jstorm.serialization.JavaUtilsSerializer;
 import org.apache.beam.runners.jstorm.translation.AbstractComponent;
 import org.apache.beam.runners.jstorm.translation.CommonInstance;
+import org.apache.beam.runners.jstorm.translation.Executor;
 import org.apache.beam.runners.jstorm.translation.ExecutorsBolt;
 import org.apache.beam.runners.jstorm.translation.JStormPipelineTranslator;
 import org.apache.beam.runners.jstorm.translation.Stream;
@@ -53,6 +56,10 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineRunner;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsValidator;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -98,17 +105,12 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
     config.put("worker.external", "beam");
     config.put("topology.acker.executors", 0);
 
-    UnmodifiableCollectionsSerializer.registerSerializers(config);
-    // register classes of guava utils, ImmutableList, ImmutableSet, ImmutableMap
-    ImmutableListSerializer.registerSerializers(config);
-    SdkRepackImmuListSerializer.registerSerializers(config);
-    ImmutableSetSerializer.registerSerializers(config);
-    SdkRepackImmuSetSerializer.registerSerializers(config);
-    ImmutableMapSerializer.registerSerializers(config);
-    SdkRepackImmutableMapSerializer.registerSerializers(config);
-    CollectionsSerializer.registerSerializers(config);
-
-    config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
+    // Register serializers of Kryo
+    GuavaUtilsSerializer.registerSerializers(config);
+    BeamUtilsSerializer.registerSerializers(config);
+    BeamSdkRepackUtilsSerializer.registerSerializers(config);
+    JStormUtilsSerializer.registerSerializers(config);
+    JavaUtilsSerializer.registerSerializers(config);
     return config;
   }
 
@@ -128,6 +130,8 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
 
     String topologyName = options.getJobName();
     Config config = convertPipelineOptionsToConfig(options);
+    ConfigExtension.setTopologyComponentSubgraphDefinition(
+        config, getSubGraphDefintions(context));
 
     return runTopology(
         topologyName,
@@ -135,6 +139,129 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
         config);
   }
 
+  private JSONObject buildNode(String name, String type) {
+    // Node: {name:name, type:tag/transform}
+    JSONObject jsonNode = new JSONObject();
+    jsonNode.put("name", name);
+    jsonNode.put("type", type);
+    return jsonNode;
+  }
+
+  private JSONArray buildEdge(Integer sourceId, Integer targetId) {
+    JSONArray edge = new JSONArray();
+    edge.addAll(Lists.newArrayList(sourceId, targetId));
+    return edge;
+  }
+
+  private String getPValueName(TranslationContext.UserGraphContext userGraphContext,
+                               TupleTag tupleTag) {
+    PValue pValue = userGraphContext.findPValue(tupleTag);
+    int index = pValue.getName().lastIndexOf("/");
+    return pValue.getName().substring(index + 1);
+  }
+
+  private String getSubGraphDefintions(TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    TranslationContext.ExecutionGraphContext executionGraphContext =
+        context.getExecutionGraphContext();
+    JSONObject graph = new JSONObject();
+
+    // Get sub-graphs for spouts
+    for (Map.Entry<String, UnboundedSourceSpout> entry :
+        executionGraphContext.getSpouts().entrySet()) {
+      JSONObject subGraph = new JSONObject();
+
+      // Nodes
+      JSONObject nodes = new JSONObject();
+      nodes.put(1, buildNode(entry.getValue().getName(), "transform"));
+      nodes.put(2, buildNode(
+          getPValueName(userGraphContext, entry.getValue().getOutputTag()), "tag"));
+      subGraph.put("nodes", nodes);
+
+      // Edges
+      JSONArray edges = new JSONArray();
+      edges.add(buildEdge(1, 2));
+      subGraph.put("edges", edges);
+
+      graph.put(entry.getKey(), subGraph);
+    }
+
+    // Get sub-graphs for bolts
+    for (Map.Entry<String, ExecutorsBolt> entry : executionGraphContext.getBolts().entrySet()) {
+      ExecutorsBolt executorsBolt = entry.getValue();
+      Map<Executor, String> executorNames = executorsBolt.getExecutorNames();
+      Map<TupleTag, Executor> inputTagToExecutors = executorsBolt.getExecutors();
+
+      // Sub-Graph
+      JSONObject subGraph = new JSONObject();
+
+      // Nodes
+      JSONObject nodes = new JSONObject();
+      Map<String, Integer> nodeNameToId = Maps.newHashMap();
+      int id = 1;
+      for (Map.Entry<Executor, Collection<TupleTag>> entry1 :
+          executorsBolt.getExecutorToOutputTags().entrySet()) {
+        Executor executor = entry1.getKey();
+        nodes.put(id, buildNode(executorNames.get(executor), "transform"));
+        nodeNameToId.put(executorNames.get(executor), id);
+        id++;
+      }
+      subGraph.put("nodes", nodes);
+
+      Collection<TupleTag> externalOutputTags = executorsBolt.getExternalOutputTags();
+      for (TupleTag outputTag : externalOutputTags) {
+        String name = getPValueName(userGraphContext, outputTag);
+        nodes.put(id, buildNode(name, "tag"));
+        nodeNameToId.put(outputTag.getId(), id);
+        id++;
+      }
+
+      Collection<TupleTag> externalInputTags = Sets.newHashSet(inputTagToExecutors.keySet());
+      externalInputTags.removeAll(executorsBolt.getOutputTags());
+      for (TupleTag inputTag : externalInputTags) {
+        String name = getPValueName(userGraphContext, inputTag);
+        nodes.put(id, buildNode(name, "tag"));
+        nodeNameToId.put(inputTag.getId(), id);
+        id++;
+      }
+
+      // Edges
+      JSONArray edges = new JSONArray();
+      for (Map.Entry<Executor, Collection<TupleTag>> entry1 :
+          executorsBolt.getExecutorToOutputTags().entrySet()) {
+        Executor sourceExecutor = entry1.getKey();
+        Collection<TupleTag> outputTags = entry1.getValue();
+        for (TupleTag tag : outputTags) {
+          if (inputTagToExecutors.containsKey(tag)) {
+            Executor targetExecutor = inputTagToExecutors.get(tag);
+            if (executorNames.containsKey(targetExecutor)) {
+              edges.add(buildEdge(nodeNameToId.get(executorNames.get(sourceExecutor)),
+                  nodeNameToId.get(executorNames.get(targetExecutor))));
+            }
+          }
+          if (externalOutputTags.contains(tag)) {
+            edges.add(buildEdge(nodeNameToId.get(executorNames.get(sourceExecutor)),
+                nodeNameToId.get(tag.getId())));
+          }
+        }
+      }
+      for (TupleTag tag : externalInputTags) {
+        if (inputTagToExecutors.containsKey(tag)) {
+          Executor targetExecutor = inputTagToExecutors.get(tag);
+          if (executorNames.containsKey(targetExecutor)) {
+            edges.add(buildEdge(nodeNameToId.get(tag.getId()),
+                nodeNameToId.get(executorNames.get(targetExecutor))));
+          }
+        }
+      }
+      subGraph.put("edges", edges);
+
+      graph.put(entry.getKey(), subGraph);
+    }
+
+    return graph.toJSONString();
+  }
+
   private JStormRunnerResult runTopology(
       String topologyName,
       StormTopology topology,

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
new file mode 100644
index 0000000..4ae47eb
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamSdkRepackUtilsSerializer.java
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.HashBasedTable;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableList;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableMap;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableSet;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableTable;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Lists;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Maps;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Sets;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Table;
+
+/**
+ * Specific serializer of {@link Kryo} for Beam SDK repackaged class,
+ * e.g. ImmutableList, ImmutableMap...
+ */
+public class BeamSdkRepackUtilsSerializer {
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableList.
+   */
+  public static class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableListSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+      output.writeInt(object.size(), true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+      final int size = input.readInt(true);
+      final Object[] list = new Object[size];
+      for (int i = 0; i < size; ++i) {
+        list[i] = kryo.readClassAndObject(input);
+      }
+      return ImmutableList.copyOf(list);
+    }
+  }
+
+  /**
+   * registers its serializer for the several ImmutableList related classes.
+   */
+  private static void registerImmutableListSerializers(Config config) {
+
+    // ImmutableList (abstract class)
+    //  +- RegularImmutableList
+    //  |   RegularImmutableList
+    //  +- SingletonImmutableList
+    //  |   Optimized for List with only 1 element.
+    //  +- SubList
+    //  |   Representation for part of ImmutableList
+    //  +- ReverseImmutableList
+    //  |   For iterating in reverse order
+    //  +- StringAsImmutableList
+    //  |   Used by Lists#charactersOf
+    //  +- Values (ImmutableTable values)
+    //      Used by return value of #values() when there are multiple cells
+
+    config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
+
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
+
+    config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of().reverse().getClass(),
+        ImmutableListSerializer.class);
+
+    config.registerSerialization(
+        Lists.charactersOf("KryoRocks").getClass(),
+        ImmutableListSerializer.class);
+
+    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
+    baseTable.put(1, 2, 3);
+    baseTable.put(4, 5, 6);
+    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+    config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
+  }
+
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableMap.
+   */
+  public static class ImmutableMapSerializer extends
+      Serializer<ImmutableMap<Object, ? extends Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = true;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableMapSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output,
+                      ImmutableMap<Object, ? extends Object> immutableMap) {
+      kryo.writeObject(output, Maps.newHashMap(immutableMap));
+    }
+
+    @Override
+    public ImmutableMap<Object, Object> read(
+        Kryo kryo,
+        Input input,
+        Class<ImmutableMap<Object, ? extends Object>> type) {
+      Map map = kryo.readObject(input, HashMap.class);
+      return ImmutableMap.copyOf(map);
+    }
+  }
+
+  private enum DummyEnum {
+    VALUE1,
+    VALUE2
+  }
+
+  /**
+   * Creates a new {@link ImmutableMapSerializer} and registers its serializer
+   * for the several ImmutableMap related classes.
+   */
+  private static void registerImmutableMapSerializers(Config config) {
+
+    config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
+    config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
+
+    Object o1 = new Object();
+    Object o2 = new Object();
+
+    config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
+    config.registerSerialization(
+        ImmutableMap.of(o1, o1, o2, o2).getClass(),
+        ImmutableMapSerializer.class);
+    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
+    for (DummyEnum e : DummyEnum.values()) {
+      enumMap.put(e, o1);
+    }
+
+    config.registerSerialization(
+        ImmutableMap.copyOf(enumMap).getClass(),
+        ImmutableMapSerializer.class);
+  }
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableSet.
+   */
+  public static class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableSetSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+      output.writeInt(object.size(), true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+      final int size = input.readInt(true);
+      ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+      for (int i = 0; i < size; ++i) {
+        builder.add(kryo.readClassAndObject(input));
+      }
+      return builder.build();
+    }
+  }
+
+  private enum SomeEnum {
+    A, B, C
+  }
+
+  /**
+   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+   * for the several ImmutableSet related classes.
+   */
+  private static void registerImmutableSetSerializers(Config config) {
+
+    // ImmutableList (abstract class)
+    //  +- EmptyImmutableSet
+    //  |   EmptyImmutableSet
+    //  +- SingletonImmutableSet
+    //  |   Optimized for Set with only 1 element.
+    //  +- RegularImmutableSet
+    //  |   RegularImmutableList
+    //  +- EnumImmutableSet
+    //  |   EnumImmutableSet
+
+    config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
+
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
+
+    config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1, 2, 3).getClass(), ImmutableSetSerializer.class);
+
+    config.registerSerialization(
+        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
+        ImmutableSetSerializer.class);
+  }
+
+  public static void registerSerializers(Config config) {
+    registerImmutableListSerializers(config);
+    registerImmutableMapSerializers(config);
+    registerImmutableSetSerializers(config);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamUtilsSerializer.java
new file mode 100644
index 0000000..db1f037
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/BeamUtilsSerializer.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.Lists;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.joda.time.Instant;
+
+/**
+ * Specific serializer of {@link Kryo} for Beam classes.
+ */
+public class BeamUtilsSerializer {
+
+  /**
+   * Serializer for {@link KV}.
+   */
+  public static class KvSerializer extends Serializer<KV> {
+
+    @Override
+    public void write(Kryo kryo, Output output, KV object) {
+      kryo.writeClassAndObject(output, object.getKey());
+      kryo.writeClassAndObject(output, object.getValue());
+    }
+
+    @Override
+    public KV read(Kryo kryo, Input input, Class<KV> type) {
+      return KV.of(kryo.readClassAndObject(input), kryo.readClassAndObject(input));
+    }
+  }
+
+  /**
+   * Serializer for {@link Instant}.
+   */
+  public static class InstantSerializer extends Serializer<Instant> {
+    @Override
+    public void write(Kryo kryo, Output output, Instant object) {
+      output.writeLong(object.getMillis(), true);
+    }
+
+    @Override
+    public Instant read(Kryo kryo, Input input, Class<Instant> type) {
+      return new Instant(input.readLong(true));
+    }
+  }
+
+  /**
+   * Serializer for {@link IntervalWindow}.
+   */
+  public static class IntervalWindowSerializer extends Serializer<IntervalWindow> {
+
+    @Override
+    public void write(Kryo kryo, Output output, IntervalWindow object) {
+      kryo.writeObject(output, object.start());
+      kryo.writeObject(output, object.end());
+    }
+
+    @Override
+    public IntervalWindow read(Kryo kryo, Input input, Class<IntervalWindow> type) {
+      Instant start = kryo.readObject(input, Instant.class);
+      Instant end = kryo.readObject(input, Instant.class);
+      return new IntervalWindow(start, end);
+    }
+  }
+
+  public static void registerSerializers(Config config) {
+    // Register classes with serializers
+    config.registerSerialization(KV.class, KvSerializer.class);
+    config.registerSerialization(IntervalWindow.class, IntervalWindowSerializer.class);
+
+    // Register classes with default serializer
+    config.registerSerialization(PaneInfo.class);
+    config.registerSerialization(StateNamespaces.WindowAndTriggerNamespace.class);
+    config.registerSerialization(StateNamespaces.WindowNamespace.class);
+    config.registerSerialization(StateNamespaces.GlobalNamespace.class);
+    config.registerSerialization(IntervalWindow.IntervalWindowCoder.class);
+    // Register classes of WindowedValue
+    config.registerSerialization(WindowedValue.valueInGlobalWindow(null).getClass());
+    config.registerSerialization(
+        WindowedValue.timestampedValueInGlobalWindow(null, Instant.now()).getClass());
+    config.registerSerialization(WindowedValue.of(null, BoundedWindow.TIMESTAMP_MIN_VALUE,
+        Lists.<BoundedWindow>newArrayList(), PaneInfo.NO_FIRING).getClass());
+    IntervalWindow w1 = new IntervalWindow(new Instant(1), new Instant(2));
+    IntervalWindow w2 = new IntervalWindow(new Instant(2), new Instant(3));
+    config.registerSerialization(WindowedValue.of(null, Instant.now(),
+        Lists.<BoundedWindow>newArrayList(w1), PaneInfo.NO_FIRING).getClass());
+    config.registerSerialization(WindowedValue.of(null, Instant.now(),
+        Lists.<BoundedWindow>newArrayList(w1, w2), PaneInfo.NO_FIRING).getClass());
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
deleted file mode 100644
index 1c8053e..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-
-import java.util.Collections;
-import java.util.List;
-
-
-/**
- * Specific serializer of {@link Kryo} for Collections.
- */
-public class CollectionsSerializer {
-
-  /**
-   * Specific {@link Kryo} serializer for {@link java.util.Collections.SingletonList}.
-   */
-  public static class CollectionsSingletonListSerializer extends Serializer<List<?>> {
-    public CollectionsSingletonListSerializer() {
-      setImmutable(true);
-    }
-
-    @Override
-    public List<?> read(final Kryo kryo, final Input input, final Class<List<?>> type) {
-      final Object obj = kryo.readClassAndObject(input);
-      return Collections.singletonList(obj);
-    }
-
-    @Override
-    public void write(final Kryo kryo, final Output output, final List<?> list) {
-      kryo.writeClassAndObject(output, list.get(0));
-    }
-
-  }
-
-  public static void registerSerializers(Config config) {
-    config.registerSerialization(Collections.singletonList("").getClass(),
-            CollectionsSingletonListSerializer.class);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
new file mode 100644
index 0000000..e6f750c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/GuavaUtilsSerializer.java
@@ -0,0 +1,252 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.HashBasedTable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
+
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Specific serializer of {@link Kryo} for Guava utils class, e.g. ImmutableList, ImmutableMap...
+ */
+public class GuavaUtilsSerializer {
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableList.
+   */
+  public static class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableListSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+      output.writeInt(object.size(), true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+      final int size = input.readInt(true);
+      final Object[] list = new Object[size];
+      for (int i = 0; i < size; ++i) {
+        list[i] = kryo.readClassAndObject(input);
+      }
+      return ImmutableList.copyOf(list);
+    }
+  }
+
+  /**
+   * Registers {@link ImmutableListSerializer} for the several ImmutableList related classes.
+   */
+  private static void registerImmutableListSerializers(Config config) {
+
+    // ImmutableList (abstract class)
+    //  +- RegularImmutableList
+    //  |   General-purpose immutable List implementation.
+    //  +- SingletonImmutableList
+    //  |   Optimized for List with only 1 element.
+    //  +- SubList
+    //  |   Representation for part of ImmutableList
+    //  +- ReverseImmutableList
+    //  |   For iterating in reverse order
+    //  +- StringAsImmutableList
+    //  |   Used by Lists#charactersOf
+    //  +- Values (ImmutableTable values)
+    //      Used by return value of #values() when there are multiple cells
+
+    config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
+
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
+
+    config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of().reverse().getClass(),
+        ImmutableListSerializer.class);
+
+    config.registerSerialization(
+        Lists.charactersOf("KryoRocks").getClass(),
+        ImmutableListSerializer.class);
+
+    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
+    baseTable.put(1, 2, 3);
+    baseTable.put(4, 5, 6);
+    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+    config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
+  }
+
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableMap.
+   */
+  public static class ImmutableMapSerializer extends
+      Serializer<ImmutableMap<Object, ? extends Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = true;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableMapSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output,
+                      ImmutableMap<Object, ? extends Object> immutableMap) {
+      kryo.writeObject(output, Maps.newHashMap(immutableMap));
+    }
+
+    @Override
+    public ImmutableMap<Object, Object> read(
+        Kryo kryo,
+        Input input,
+        Class<ImmutableMap<Object, ? extends Object>> type) {
+      Map map = kryo.readObject(input, HashMap.class);
+      return ImmutableMap.copyOf(map);
+    }
+  }
+
+  private enum DummyEnum {
+    VALUE1,
+    VALUE2
+  }
+
+  /**
+   * Creates a new {@link ImmutableMapSerializer} and registers its serializer
+   * for the several ImmutableMap related classes.
+   */
+  private static void registerImmutableMapSerializers(Config config) {
+
+    config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
+    config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
+
+    Object o1 = new Object();
+    Object o2 = new Object();
+
+    config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
+    config.registerSerialization(
+        ImmutableMap.of(o1, o1, o2, o2).getClass(),
+        ImmutableMapSerializer.class);
+    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
+    for (DummyEnum e : DummyEnum.values()) {
+      enumMap.put(e, o1);
+    }
+
+    config.registerSerialization(
+        ImmutableMap.copyOf(enumMap).getClass(),
+        ImmutableMapSerializer.class);
+  }
+
+  /**
+   * Specific serializer of {@link Kryo} for ImmutableSet.
+   */
+  public static class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableSetSerializer() {
+      super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+      output.writeInt(object.size(), true);
+      for (Object elm : object) {
+        kryo.writeClassAndObject(output, elm);
+      }
+    }
+
+    @Override
+    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+      final int size = input.readInt(true);
+      ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+      for (int i = 0; i < size; ++i) {
+        builder.add(kryo.readClassAndObject(input));
+      }
+      return builder.build();
+    }
+  }
+
+  private enum SomeEnum {
+    A, B, C
+  }
+
+  /**
+   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+   * for the several ImmutableSet related classes.
+   */
+  private static void registerImmutableSetSerializers(Config config) {
+
+    // ImmutableSet (abstract class)
+    //  +- EmptyImmutableSet
+    //  |   Optimized for an empty Set.
+    //  +- SingletonImmutableSet
+    //  |   Optimized for Set with only 1 element.
+    //  +- RegularImmutableSet
+    //  |   General-purpose immutable Set implementation.
+    //  +- EnumImmutableSet
+    //  |   Optimized for Sets of enum elements.
+
+    config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
+
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
+
+    config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1, 2, 3).getClass(), ImmutableSetSerializer.class);
+
+    config.registerSerialization(
+        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
+        ImmutableSetSerializer.class);
+  }
+
+  public static void registerSerializers(Config config) {
+    registerImmutableListSerializers(config);
+    registerImmutableMapSerializers(config);
+    registerImmutableSetSerializers(config);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
deleted file mode 100644
index 215ccf1..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.HashBasedTable;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableTable;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Table;
-
-/**
- * Specific serializer of {@link Kryo} for ImmutableList.
- */
-public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = false;
-  private static final boolean IMMUTABLE = true;
-
-  public ImmutableListSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-    output.writeInt(object.size(), true);
-    for (Object elm : object) {
-      kryo.writeClassAndObject(output, elm);
-    }
-  }
-
-  @Override
-  public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-    final int size = input.readInt(true);
-    final Object[] list = new Object[size];
-    for (int i = 0; i < size; ++i) {
-      list[i] = kryo.readClassAndObject(input);
-    }
-    return ImmutableList.copyOf(list);
-  }
-
-  /**
-   * Creates a new {@link ImmutableListSerializer} and registers its serializer
-   * for the several ImmutableList related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    // ImmutableList (abstract class)
-    //  +- RegularImmutableList
-    //  |   RegularImmutableList
-    //  +- SingletonImmutableList
-    //  |   Optimized for List with only 1 element.
-    //  +- SubList
-    //  |   Representation for part of ImmutableList
-    //  +- ReverseImmutableList
-    //  |   For iterating in reverse order
-    //  +- StringAsImmutableList
-    //  |   Used by Lists#charactersOf
-    //  +- Values (ImmutableTable values)
-    //      Used by return value of #values() when there are multiple cells
-
-    config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
-
-    // Note:
-    //  Only registering above is good enough for serializing/deserializing.
-    //  but if using Kryo#copy, following is required.
-
-    config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
-    config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
-    config.registerSerialization(
-        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
-        ImmutableListSerializer.class);
-    config.registerSerialization(
-        ImmutableList.of().reverse().getClass(),
-        ImmutableListSerializer.class);
-
-    config.registerSerialization(
-        Lists.charactersOf("KryoRocks").getClass(),
-        ImmutableListSerializer.class);
-
-    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
-    baseTable.put(1, 2, 3);
-    baseTable.put(4, 5, 6);
-    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-    config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
deleted file mode 100644
index 6fe3f59..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Specific serializer of {@link Kryo} for ImmutableMap.
- */
-public class ImmutableMapSerializer extends Serializer<ImmutableMap<Object, ? extends Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = true;
-  private static final boolean IMMUTABLE = true;
-
-  public ImmutableMapSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
-    kryo.writeObject(output, Maps.newHashMap(immutableMap));
-  }
-
-  @Override
-  public ImmutableMap<Object, Object> read(
-      Kryo kryo,
-      Input input,
-      Class<ImmutableMap<Object, ? extends Object>> type) {
-    Map map = kryo.readObject(input, HashMap.class);
-    return ImmutableMap.copyOf(map);
-  }
-
-  /**
-   * Creates a new {@link ImmutableMapSerializer} and registers its serializer
-   * for the several ImmutableMap related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
-    config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
-
-    Object o1 = new Object();
-    Object o2 = new Object();
-
-    config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
-    config.registerSerialization(
-        ImmutableMap.of(o1, o1, o2, o2).getClass(),
-        ImmutableMapSerializer.class);
-    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
-    for (DummyEnum e : DummyEnum.values()) {
-      enumMap.put(e, o1);
-    }
-
-    config.registerSerialization(
-        ImmutableMap.copyOf(enumMap).getClass(),
-        ImmutableMapSerializer.class);
-  }
-
-  private enum DummyEnum {
-    VALUE1,
-    VALUE2
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
deleted file mode 100644
index 625a32c..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
-
-/**
- * Specific serializer of {@link Kryo} for ImmutableSet.
- */
-public class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = false;
-  private static final boolean IMMUTABLE = true;
-
-  public ImmutableSetSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-    output.writeInt(object.size(), true);
-    for (Object elm : object) {
-      kryo.writeClassAndObject(output, elm);
-    }
-  }
-
-  @Override
-  public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-    final int size = input.readInt(true);
-    ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-    for (int i = 0; i < size; ++i) {
-      builder.add(kryo.readClassAndObject(input));
-    }
-    return builder.build();
-  }
-
-  /**
-   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-   * for the several ImmutableSet related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    // ImmutableList (abstract class)
-    //  +- EmptyImmutableSet
-    //  |   EmptyImmutableSet
-    //  +- SingletonImmutableSet
-    //  |   Optimized for Set with only 1 element.
-    //  +- RegularImmutableSet
-    //  |   RegularImmutableList
-    //  +- EnumImmutableSet
-    //  |   EnumImmutableSet
-
-    config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
-
-    // Note:
-    //  Only registering above is good enough for serializing/deserializing.
-    //  but if using Kryo#copy, following is required.
-
-    config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
-    config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
-    config.registerSerialization(ImmutableSet.of(1, 2, 3).getClass(), ImmutableSetSerializer.class);
-
-    config.registerSerialization(
-        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
-        ImmutableSetSerializer.class);
-  }
-
-  private enum SomeEnum {
-    A, B, C
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JStormUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JStormUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JStormUtilsSerializer.java
new file mode 100644
index 0000000..69cfe24
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JStormUtilsSerializer.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.KvStoreIterable;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Specific serializer of {@link Kryo} for the Utils of JStorm Runner.
+ */
+public class JStormUtilsSerializer {
+
+  /**
+   * Specific {@link Kryo} serializer for {@link ComposedKey}.
+   */
+  public static class ComposedKeySerializer extends Serializer<ComposedKey> {
+    public ComposedKeySerializer() {
+      setImmutable(true);
+    }
+
+    @Override
+    public ComposedKey read(final Kryo kryo, final Input input, final Class<ComposedKey> type) {
+      final ComposedKey ret = ComposedKey.of();
+      int len = input.readInt(true);
+      for (int i = 0; i < len; i++) {
+        Object obj = kryo.readClassAndObject(input);
+        ret.add(obj);
+      }
+      return ret;
+    }
+
+    @Override
+    public void write(final Kryo kryo, final Output output, final ComposedKey object) {
+      int len = object.size();
+      output.writeInt(len, true);
+      for (Object elem : object) {
+        kryo.writeClassAndObject(output, elem);
+      }
+    }
+  }
+
+  private static void registerComposedKeySerializers(Config config) {
+    config.registerSerialization(ComposedKey.class, ComposedKeySerializer.class);
+  }
+
+  /**
+   * Specific serializer of {@link Kryo} for KvStoreIterable.
+   */
+  public static class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
+
+    public KvStoreIterableSerializer() {
+
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
+      int len = Iterables.size(object);
+      output.writeInt(len, true);
+      Iterator<Object> itr = object.iterator();
+      while (itr.hasNext()) {
+        Object elem = itr.next();
+        kryo.writeClassAndObject(output, elem);
+      }
+    }
+
+    @Override
+    public KvStoreIterable<Object> read(Kryo kryo, Input input,
+                                        Class<KvStoreIterable<Object>> type) {
+      final int size = input.readInt(true);
+      List<Object> values = Lists.newArrayList();
+      for (int i = 0; i < size; ++i) {
+        Object elem = kryo.readClassAndObject(input);
+        values.add(elem);
+      }
+
+      return new KvStoreIterable<Object>() {
+        Iterable<Object> values;
+
+        @Override
+        public Iterator<Object> iterator() {
+          return values.iterator();
+        }
+
+        public KvStoreIterable init(Iterable<Object> values) {
+          this.values = values;
+          return this;
+        }
+
+        @Override
+        public String toString() {
+          return values.toString();
+        }
+      }.init(values);
+    }
+  }
+
+  public static void registerSerializers(Config config) {
+    registerComposedKeySerializers(config);
+    config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
new file mode 100644
index 0000000..5df686c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/JavaUtilsSerializer.java
@@ -0,0 +1,236 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/**
+ * Specific serializer of {@link Kryo} for Java Utils, e.g. Collections.SingletonList...
+ */
+public class JavaUtilsSerializer {
+
+  /**
+   * Specific {@link Kryo} serializer for {@link java.util.Collections.SingletonList}.
+   */
+  public static class CollectionsSingletonListSerializer extends Serializer<List<?>> {
+    public CollectionsSingletonListSerializer() {
+      setImmutable(true);
+    }
+
+    @Override
+    public List<?> read(final Kryo kryo, final Input input, final Class<List<?>> type) {
+      final Object obj = kryo.readClassAndObject(input);
+      return Collections.singletonList(obj);
+    }
+
+    @Override
+    public void write(final Kryo kryo, final Output output, final List<?> list) {
+      kryo.writeClassAndObject(output, list.get(0));
+    }
+
+  }
+
+  /**
+   * Specific serializer of {@link Kryo} for Unmodifiable Collection.
+   */
+  public static class UnmodifiableCollectionsSerializer extends Serializer<Object> {
+
+    @Override
+    public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
+      final int ordinal = input.readInt(true);
+      final UnmodifiableCollection unmodifiableCollection =
+          UnmodifiableCollection.values()[ordinal];
+      final Object sourceCollection = kryo.readClassAndObject(input);
+      return unmodifiableCollection.create(sourceCollection);
+    }
+
+    @Override
+    public void write(final Kryo kryo, final Output output, final Object object) {
+      try {
+        final UnmodifiableCollection unmodifiableCollection =
+            UnmodifiableCollection.valueOfType(object.getClass());
+        // the ordinal could be replaced by s.th. else (e.g. a explicitely managed "id")
+        output.writeInt(unmodifiableCollection.ordinal(), true);
+        kryo.writeClassAndObject(output, unmodifiableCollection.sourceCollectionField.get(object));
+      } catch (final RuntimeException e) {
+        // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
+        // handles SerializationException specifically (resizing the buffer)...
+        throw e;
+      } catch (final Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    @Override
+    public Object copy(Kryo kryo, Object original) {
+      try {
+        final UnmodifiableCollection unmodifiableCollection =
+            UnmodifiableCollection.valueOfType(original.getClass());
+        Object sourceCollectionCopy =
+            kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
+        return unmodifiableCollection.create(sourceCollectionCopy);
+      } catch (final RuntimeException e) {
+        // Don't eat and wrap RuntimeExceptions
+        throw e;
+      } catch (final Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+  private static final Field SOURCE_COLLECTION_FIELD;
+  private static final Field SOURCE_MAP_FIELD;
+
+  static {
+    try {
+      SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection")
+          .getDeclaredField("c");
+      SOURCE_COLLECTION_FIELD.setAccessible(true);
+
+
+      SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap")
+          .getDeclaredField("m");
+      SOURCE_MAP_FIELD.setAccessible(true);
+    } catch (final Exception e) {
+      throw new RuntimeException("Could not access source collection"
+          + " field in java.util.Collections$UnmodifiableCollection.", e);
+    }
+  }
+
+  private enum UnmodifiableCollection {
+    COLLECTION(
+        Collections.unmodifiableCollection(Arrays.asList("")).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableCollection((Collection<?>) sourceCollection);
+      }
+    },
+    RANDOM_ACCESS_LIST(
+        Collections.unmodifiableList(new ArrayList<Void>()).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableList((List<?>) sourceCollection);
+      }
+    },
+    LIST(Collections.unmodifiableList(new LinkedList<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableList((List<?>) sourceCollection);
+      }
+    },
+    SET(Collections.unmodifiableSet(new HashSet<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSet((Set<?>) sourceCollection);
+      }
+    },
+    SORTED_SET(
+        Collections.unmodifiableSortedSet(new TreeSet<Void>()).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSortedSet((SortedSet<?>) sourceCollection);
+      }
+    },
+    MAP(Collections.unmodifiableMap(new HashMap<Void, Void>()).getClass(), SOURCE_MAP_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableMap((Map<?, ?>) sourceCollection);
+      }
+
+    },
+    SORTED_MAP(
+        Collections.unmodifiableSortedMap(new TreeMap<Void, Void>()).getClass(),
+        SOURCE_MAP_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSortedMap((SortedMap<?, ?>) sourceCollection);
+      }
+    };
+
+    private final Class<?> type;
+    private final Field sourceCollectionField;
+
+    private UnmodifiableCollection(final Class<?> type, final Field sourceCollectionField) {
+      this.type = type;
+      this.sourceCollectionField = sourceCollectionField;
+    }
+
+    /**
+     * @param sourceCollection
+     */
+    public abstract Object create(Object sourceCollection);
+
+    static UnmodifiableCollection valueOfType(final Class<?> type) {
+      for (final UnmodifiableCollection item : values()) {
+        if (item.type.equals(type)) {
+          return item;
+        }
+      }
+      throw new IllegalArgumentException("The type " + type + " is not supported.");
+    }
+
+  }
+
+  /**
+   * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
+   * for the several unmodifiable Collections that can be created via {@link Collections},
+   * including {@link Map}s.
+   *
+   * @see Collections#unmodifiableCollection(Collection)
+   * @see Collections#unmodifiableList(List)
+   * @see Collections#unmodifiableSet(Set)
+   * @see Collections#unmodifiableSortedSet(SortedSet)
+   * @see Collections#unmodifiableMap(Map)
+   * @see Collections#unmodifiableSortedMap(SortedMap)
+   */
+  private static void registerUnmodifableCollectionSerializers(Config config) {
+    UnmodifiableCollection.values();
+    for (final UnmodifiableCollection item : UnmodifiableCollection.values()) {
+      config.registerSerialization(item.type, UnmodifiableCollectionsSerializer.class);
+    }
+  }
+
+  public static void registerSerializers(Config config) {
+    config.registerSerialization(Collections.singletonList("").getClass(),
+        CollectionsSingletonListSerializer.class);
+    registerUnmodifableCollectionSerializers(config);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
deleted file mode 100644
index 44a5378..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.Lists;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Specific serializer of {@link Kryo} for KvStoreIterable.
- */
-public class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
-
-  public KvStoreIterableSerializer() {
-
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
-    List<Object> values = Lists.newArrayList(object);
-    output.writeInt(values.size(), true);
-    for (Object elm : object) {
-      kryo.writeClassAndObject(output, elm);
-    }
-  }
-
-  @Override
-  public KvStoreIterable<Object> read(Kryo kryo, Input input, Class<KvStoreIterable<Object>> type) {
-    final int size = input.readInt(true);
-    List<Object> values = Lists.newArrayList();
-    for (int i = 0; i < size; ++i) {
-      values.add(kryo.readClassAndObject(input));
-    }
-
-    return new KvStoreIterable<Object>() {
-      Iterable<Object> values;
-
-      @Override
-      public Iterator<Object> iterator() {
-        return values.iterator();
-      }
-
-      public KvStoreIterable init(Iterable<Object> values) {
-        this.values = values;
-        return this;
-      }
-
-      @Override
-      public String toString() {
-        return values.toString();
-      }
-    }.init(values);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
deleted file mode 100644
index e4e0e12..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.HashBasedTable;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableList;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableTable;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.Lists;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.Table;
-
-/**
- * Specific serializer of {@link Kryo} for Beam SDK repackaged ImmutableList.
- */
-public class SdkRepackImmuListSerializer extends Serializer<ImmutableList<Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = false;
-  private static final boolean IMMUTABLE = true;
-
-  public SdkRepackImmuListSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-    output.writeInt(object.size(), true);
-    for (Object elm : object) {
-      kryo.writeClassAndObject(output, elm);
-    }
-  }
-
-  @Override
-  public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-    final int size = input.readInt(true);
-    final Object[] list = new Object[size];
-    for (int i = 0; i < size; ++i) {
-      list[i] = kryo.readClassAndObject(input);
-    }
-    return ImmutableList.copyOf(list);
-  }
-
-  /**
-   * Creates a new {@link ImmutableListSerializer} and registers its serializer
-   * for the several ImmutableList related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    // ImmutableList (abstract class)
-    //  +- RegularImmutableList
-    //  |   RegularImmutableList
-    //  +- SingletonImmutableList
-    //  |   Optimized for List with only 1 element.
-    //  +- SubList
-    //  |   Representation for part of ImmutableList
-    //  +- ReverseImmutableList
-    //  |   For iterating in reverse order
-    //  +- StringAsImmutableList
-    //  |   Used by Lists#charactersOf
-    //  +- Values (ImmutableTable values)
-    //      Used by return value of #values() when there are multiple cells
-
-    config.registerSerialization(ImmutableList.class, SdkRepackImmuListSerializer.class);
-
-    // Note:
-    //  Only registering above is good enough for serializing/deserializing.
-    //  but if using Kryo#copy, following is required.
-
-    config.registerSerialization(ImmutableList.of().getClass(), SdkRepackImmuListSerializer.class);
-    config.registerSerialization(ImmutableList.of(1).getClass(), SdkRepackImmuListSerializer.class);
-    config.registerSerialization(
-        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
-        SdkRepackImmuListSerializer.class);
-    config.registerSerialization(
-        ImmutableList.of().reverse().getClass(),
-        SdkRepackImmuListSerializer.class);
-
-    config.registerSerialization(
-        Lists.charactersOf("KryoRocks").getClass(),
-        SdkRepackImmuListSerializer.class);
-
-    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
-    baseTable.put(1, 2, 3);
-    baseTable.put(4, 5, 6);
-    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-    config.registerSerialization(table.values().getClass(), SdkRepackImmuListSerializer.class);
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
deleted file mode 100644
index 3cb60e8..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableSet;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.Sets;
-
-/**
- * Specific serializer of {@link Kryo} for Beam SDK repackaged ImmutableSet.
- */
-public class SdkRepackImmuSetSerializer extends Serializer<ImmutableSet<Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = false;
-  private static final boolean IMMUTABLE = true;
-
-  public SdkRepackImmuSetSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-    output.writeInt(object.size(), true);
-    for (Object elm : object) {
-      kryo.writeClassAndObject(output, elm);
-    }
-  }
-
-  @Override
-  public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-    final int size = input.readInt(true);
-    ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-    for (int i = 0; i < size; ++i) {
-      builder.add(kryo.readClassAndObject(input));
-    }
-    return builder.build();
-  }
-
-  /**
-   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-   * for the several ImmutableSet related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    // ImmutableList (abstract class)
-    //  +- EmptyImmutableSet
-    //  |   EmptyImmutableSet
-    //  +- SingletonImmutableSet
-    //  |   Optimized for Set with only 1 element.
-    //  +- RegularImmutableSet
-    //  |   RegularImmutableList
-    //  +- EnumImmutableSet
-    //  |   EnumImmutableSet
-
-    config.registerSerialization(ImmutableSet.class, SdkRepackImmuSetSerializer.class);
-
-    // Note:
-    //  Only registering above is good enough for serializing/deserializing.
-    //  but if using Kryo#copy, following is required.
-
-    config.registerSerialization(ImmutableSet.of().getClass(), SdkRepackImmuSetSerializer.class);
-    config.registerSerialization(ImmutableSet.of(1).getClass(), SdkRepackImmuSetSerializer.class);
-    config.registerSerialization(
-        ImmutableSet.of(1, 2, 3).getClass(),
-        SdkRepackImmuSetSerializer.class);
-
-    config.registerSerialization(
-        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
-        SdkRepackImmuSetSerializer.class);
-  }
-
-  private enum SomeEnum {
-    A, B, C
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
deleted file mode 100644
index 1a0d902..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableMap;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.Maps;
-
-/**
- * Specific serializer of {@link Kryo} for ImmutableMap.
- */
-public class SdkRepackImmutableMapSerializer
-    extends Serializer<ImmutableMap<Object, ? extends Object>> {
-
-  private static final boolean DOES_NOT_ACCEPT_NULL = true;
-  private static final boolean IMMUTABLE = true;
-
-  public SdkRepackImmutableMapSerializer() {
-    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-  }
-
-  @Override
-  public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
-    kryo.writeObject(output, Maps.newHashMap(immutableMap));
-  }
-
-  @Override
-  public ImmutableMap<Object, Object> read(
-      Kryo kryo,
-      Input input,
-      Class<ImmutableMap<Object, ? extends Object>> type) {
-    Map map = kryo.readObject(input, HashMap.class);
-    return ImmutableMap.copyOf(map);
-  }
-
-  /**
-   * Creates a new {@link SdkRepackImmutableMapSerializer} and registers its serializer
-   * for the several ImmutableMap related classes.
-   */
-  public static void registerSerializers(Config config) {
-
-    config.registerSerialization(ImmutableMap.class, SdkRepackImmutableMapSerializer.class);
-    config.registerSerialization(
-        ImmutableMap.of().getClass(), SdkRepackImmutableMapSerializer.class);
-
-    Object o1 = new Object();
-    Object o2 = new Object();
-
-    config.registerSerialization(
-        ImmutableMap.of(o1, o1).getClass(), SdkRepackImmutableMapSerializer.class);
-    config.registerSerialization(
-        ImmutableMap.of(o1, o1, o2, o2).getClass(),
-        SdkRepackImmutableMapSerializer.class);
-    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
-    for (DummyEnum e : DummyEnum.values()) {
-      enumMap.put(e, o1);
-    }
-
-    config.registerSerialization(
-        ImmutableMap.copyOf(enumMap).getClass(),
-        SdkRepackImmutableMapSerializer.class);
-  }
-
-  private enum DummyEnum {
-    VALUE1,
-    VALUE2
-  }
-}


[48/53] [abbrv] beam git commit: jstorm-runner: Add maven repository for JStorm dependency

Posted by pe...@apache.org.
jstorm-runner: Add maven repository for JStorm dependency


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/00b9c5c8
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/00b9c5c8
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/00b9c5c8

Branch: refs/heads/jstorm-runner
Commit: 00b9c5c867a57c291c43caa2b554dda9bb3228ba
Parents: 87aaa6e
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Thu Aug 17 19:13:50 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:01 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml | 31 +++++++++++++++++++++++++++++++
 1 file changed, 31 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/00b9c5c8/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index 75387ef..681adb5 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -34,6 +34,37 @@
     <jstorm.core.version>2.5.0-SNAPSHOT</jstorm.core.version>
   </properties>
 
+  <repositories>
+    <repository>
+      <id>apache.snapshots</id>
+      <name>Apache Snapshot Repository</name>
+      <url>https://repository.apache.org/snapshots</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+    </repository>
+    <repository>
+      <id>ossrh releases</id>
+      <url>https://oss.sonatype.org/content/repositories/releases</url>
+      <releases>
+        <enabled>true</enabled>
+      </releases>
+      <snapshots>
+        <enabled>false</enabled>
+      </snapshots>
+    </repository>
+    <repository>
+      <id>ossrh snapshots</id>
+      <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+      <snapshots>
+        <enabled>true</enabled>
+      </snapshots>
+    </repository>
+  </repositories>
+
   <profiles>
     <profile>
       <!-- This profile adds execution of ValidatesRunner integration tests


[06/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundMultiTranslator.java
deleted file mode 100644
index 1870681..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundMultiTranslator.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import avro.shaded.com.google.common.collect.Lists;
-import avro.shaded.com.google.common.collect.Maps;
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.DoFnExecutor;
-import com.alibaba.jstorm.beam.translation.runtime.MultiOutputDoFnExecutor;
-import com.alibaba.jstorm.beam.translation.runtime.MultiStatefulDoFnExecutor;
-import com.google.common.collect.ImmutableList;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableMap;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.*;
-
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Translates a ParDo.BoundMulti to a Storm {@link com.alibaba.jstorm.beam.translation.runtime.DoFnExecutor}.
- */
-public class ParDoBoundMultiTranslator<InputT, OutputT>
-        extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
-
-    @Override
-    public void translateNode(ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
-        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
-        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
-
-        Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
-        Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
-        for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
-            Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
-            localToExternalTupleTagMap.put(entry.getKey(), itr.next());
-        }
-
-        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-        List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
-        sideOutputTags.remove(mainOutputTag);
-
-        Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-        String description = describeTransform(
-                transform,
-                allInputs,
-                allOutputs);
-
-        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-
-        DoFnExecutor executor;
-        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-        if (signature.stateDeclarations().size() > 0
-                || signature.timerDeclarations().size() > 0) {
-            executor = new MultiStatefulDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    (DoFn<KV, OutputT>) transform.getFn(),
-                    (Coder) WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<KV>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags,
-                    localToExternalTupleTagMap);
-        } else {
-            executor = new MultiOutputDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    transform.getFn(),
-                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags,
-                    localToExternalTupleTagMap);
-        }
-
-        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundTranslator.java
deleted file mode 100644
index a8d8186..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ParDoBoundTranslator.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import java.util.List;
-import java.util.Map;
-
-import avro.shaded.com.google.common.collect.Lists;
-import com.alibaba.jstorm.beam.translation.runtime.StatefulDoFnExecutor;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.*;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.DoFnExecutor;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Translates a ParDo.Bound to a Storm {@link DoFnExecutor}.
- */
-public class ParDoBoundTranslator<InputT, OutputT>
-        extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
-
-    private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
-
-    @Override
-    public void translateNode(ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
-        final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        final TupleTag<?> inputTag = userGraphContext.getInputTag();
-        PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
-
-        TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-        List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
-
-        Map<TupleTag<?>, PValue> allInputs = avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-        String description = describeTransform(
-                transform,
-                allInputs,
-                userGraphContext.getOutputs());
-
-        ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-        for (PCollectionView pCollectionView : transform.getSideInputs()) {
-            sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-        }
-
-        DoFnExecutor executor;
-        DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-        if (signature.stateDeclarations().size() > 0
-                || signature.timerDeclarations().size() > 0) {
-            executor = new StatefulDoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    (DoFn<KV, OutputT>) transform.getFn(),
-                    (Coder) WindowedValue.getFullCoder(
-                            input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<KV>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags);
-        } else {
-            executor = new DoFnExecutor<>(
-                    userGraphContext.getStepName(),
-                    description,
-                    userGraphContext.getOptions(),
-                    transform.getFn(),
-                    WindowedValue.getFullCoder(input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-                    input.getWindowingStrategy(),
-                    (TupleTag<InputT>) inputTag,
-                    transform.getSideInputs(),
-                    sideInputTagToView.build(),
-                    mainOutputTag,
-                    sideOutputTags);
-        }
-
-        context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ReshuffleTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ReshuffleTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ReshuffleTranslator.java
deleted file mode 100644
index 26a9b22..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ReshuffleTranslator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import org.apache.beam.sdk.transforms.Reshuffle;
-
-public class ReshuffleTranslator<K, V> extends TransformTranslator.Default<Reshuffle<K,V>> {
-    
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/Stream.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/Stream.java
deleted file mode 100644
index f80a39d..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/Stream.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.google.auto.value.AutoValue;
-
-import javax.annotation.Nullable;
-import java.util.List;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Class that defines the stream connection between upstream and downstream components.
- */
-@AutoValue
-public abstract class Stream {
-
-    public abstract Producer getProducer();
-    public abstract Consumer getConsumer();
-
-    public static Stream of(Producer producer, Consumer consumer) {
-        return new com.alibaba.jstorm.beam.translation.translator.AutoValue_Stream(producer, consumer);
-    }
-
-    @AutoValue
-    public abstract static class Producer {
-        public abstract String getComponentId();
-        public abstract String getStreamId();
-        public abstract String getStreamName();
-
-        public static Producer of(String componentId, String streamId, String streamName) {
-            return new com.alibaba.jstorm.beam.translation.translator.AutoValue_Stream_Producer(
-                    componentId, streamId, streamName);
-        }
-    }
-
-    @AutoValue
-    public abstract static class Consumer {
-        public abstract String getComponentId();
-        public abstract Grouping getGrouping();
-
-        public static Consumer of(String componentId, Grouping grouping) {
-            return new com.alibaba.jstorm.beam.translation.translator.AutoValue_Stream_Consumer(
-                    componentId, grouping);
-        }
-    }
-
-    @AutoValue
-    public abstract static class Grouping {
-        public abstract Type getType();
-
-        @Nullable
-        public abstract List<String> getFields();
-
-        public static Grouping of(Type type) {
-            checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
-            return new com.alibaba.jstorm.beam.translation.translator.AutoValue_Stream_Grouping(
-                    type, null /* fields */);
-        }
-
-        public static Grouping byFields(List<String> fields) {
-            checkNotNull(fields, "fields");
-            checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
-            return new com.alibaba.jstorm.beam.translation.translator.AutoValue_Stream_Grouping(
-                    Type.FIELDS, fields);
-        }
-
-        /**
-         * Types of stream groupings Storm allows
-         */
-        public enum Type {
-            ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/TransformTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/TransformTranslator.java
deleted file mode 100644
index e1c35f6..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/TransformTranslator.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.FluentIterable;
-import org.apache.beam.sdk.transforms.PTransform;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import java.util.Map;
-
-/**
- * Interface for classes capable of tranforming Beam PTransforms into Storm primitives.
- */
-public interface TransformTranslator<T extends PTransform<?, ?>> {
-
-    void translateNode(T transform, TranslationContext context);
-
-    /**
-     * Returns true if this translator can translate the given transform.
-     */
-    boolean canTranslate(T transform, TranslationContext context);
-
-    class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
-        @Override
-        public void translateNode(T1 transform, TranslationContext context) {
-
-        }
-
-        @Override
-        public boolean canTranslate(T1 transform, TranslationContext context) {
-            return true;
-        }
-
-        static String describeTransform(
-                PTransform<?, ?> transform,
-                Map<TupleTag<?>, PValue> inputs,
-                Map<TupleTag<?>, PValue> outputs) {
-            return String.format("%s --> %s --> %s",
-                    Joiner.on('+').join(FluentIterable.from(inputs.entrySet())
-                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                                @Override
-                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
-                                    return taggedPValue.getKey().getId();
-                                    // return taggedPValue.getValue().getName();
-                                }})),
-                    transform.getName(),
-                    Joiner.on('+').join(FluentIterable.from(outputs.entrySet())
-                            .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                                @Override
-                                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
-                                    return taggedPvalue.getKey().getId();
-                                    //return taggedPValue.getValue().getName();
-                                }})));
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/UnboundedSourceTranslator.java
deleted file mode 100644
index 0677e92..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/UnboundedSourceTranslator.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.UnboundedSourceSpout;
-
-/**
- * Translates a Read.Unbounded into a Storm spout.
- * 
- * @param <T>
- */
-public class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbounded<T>> {
-    public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-        TupleTag<?> tag = userGraphContext.getOutputTag();
-        PValue output = userGraphContext.getOutput();
-
-        UnboundedSourceSpout spout = new UnboundedSourceSpout(
-                description,
-                transform.getSource(), userGraphContext.getOptions(), tag);
-        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ViewTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ViewTranslator.java
deleted file mode 100644
index 3069955..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/ViewTranslator.java
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.runtime.ViewExecutor;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderRegistry;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.ListCoder;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.PCollectionViews;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A {@link TransformTranslator} for executing {@link View Views} in JStorm runner.
- */
-public class ViewTranslator extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
-    @Override
-    public void translateNode(CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-        ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
-        context.addTransformExecutor(viewExecutor);
-    }
-
-    /**
-     * Specialized implementation for
-     * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}
-     * for the Flink runner in streaming mode.
-     */
-    public static class ViewAsMap<K, V>
-            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
-
-        @SuppressWarnings("unused") // used via reflection in JstormRunner#apply()
-        public ViewAsMap(View.AsMap<K, V> transform) {
-        }
-
-        @Override
-        public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
-            PCollectionView<Map<K, V>> view =
-                    PCollectionViews.mapView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            @SuppressWarnings({"rawtypes", "unchecked"})
-            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-            try {
-                inputCoder.getKeyCoder().verifyDeterministic();
-            } catch (Coder.NonDeterministicException e) {
-                // TODO: log warning as other runners.
-            }
-
-            return input
-                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsMap";
-        }
-    }
-
-    /**
-     * Specialized expansion for {@link
-     * View.AsMultimap View.AsMultimap} for the
-     * Flink runner in streaming mode.
-     */
-    public static class ViewAsMultimap<K, V>
-            extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsMultimap(View.AsMultimap<K, V> transform) {
-        }
-
-        @Override
-        public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
-            PCollectionView<Map<K, Iterable<V>>> view =
-                    PCollectionViews.multimapView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            @SuppressWarnings({"rawtypes", "unchecked"})
-            KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-            try {
-                inputCoder.getKeyCoder().verifyDeterministic();
-            } catch (Coder.NonDeterministicException e) {
-                // TODO: log warning as other runners.
-            }
-
-            return input
-                    .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsMultimap";
-        }
-    }
-
-    /**
-     * Specialized implementation for
-     * {@link View.AsList View.AsList} for the
-     * JStorm runner in streaming mode.
-     */
-    public static class ViewAsList<T>
-            extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsList(View.AsList<T> transform) {}
-
-        @Override
-        public PCollectionView<List<T>> expand(PCollection<T> input) {
-            PCollectionView<List<T>> view =
-                    PCollectionViews.listView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsList";
-        }
-    }
-
-    /**
-     * Specialized implementation for
-     * {@link View.AsIterable View.AsIterable} for the
-     * JStorm runner in streaming mode.
-     */
-    public static class ViewAsIterable<T>
-            extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsIterable(View.AsIterable<T> transform) { }
-
-        @Override
-        public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
-            PCollectionView<Iterable<T>> view =
-                    PCollectionViews.iterableView(
-                            input,
-                            input.getWindowingStrategy(),
-                            input.getCoder());
-
-            return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-                    .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsIterable";
-        }
-    }
-
-    /**
-     * Specialized expansion for
-     * {@link View.AsSingleton View.AsSingleton} for the
-     * JStorm runner in streaming mode.
-     */
-    public static class ViewAsSingleton<T>
-            extends PTransform<PCollection<T>, PCollectionView<T>> {
-        private View.AsSingleton<T> transform;
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-        public ViewAsSingleton(View.AsSingleton<T> transform) {
-            this.transform = transform;
-        }
-
-        @Override
-        public PCollectionView<T> expand(PCollection<T> input) {
-            Combine.Globally<T, T> combine = Combine.globally(
-                    new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
-            if (!transform.hasDefaultValue()) {
-                combine = combine.withoutDefaults();
-            }
-            return input.apply(combine.asSingletonView());
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingViewAsSingleton";
-        }
-
-        private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
-            private boolean hasDefaultValue;
-            private T defaultValue;
-
-            SingletonCombine(boolean hasDefaultValue, T defaultValue) {
-                this.hasDefaultValue = hasDefaultValue;
-                this.defaultValue = defaultValue;
-            }
-
-            @Override
-            public T apply(T left, T right) {
-                throw new IllegalArgumentException("PCollection with more than one element "
-                        + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
-                        + "combine the PCollection into a single value");
-            }
-
-            @Override
-            public T identity() {
-                if (hasDefaultValue) {
-                    return defaultValue;
-                } else {
-                    throw new IllegalArgumentException(
-                            "Empty PCollection accessed as a singleton view. "
-                                    + "Consider setting withDefault to provide a default value");
-                }
-            }
-        }
-    }
-
-    public static class CombineGloballyAsSingletonView<InputT, OutputT>
-            extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
-        Combine.GloballyAsSingletonView<InputT, OutputT> transform;
-
-        /**
-         * Builds an instance of this class from the overridden transform.
-         */
-        @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply()
-        public CombineGloballyAsSingletonView(
-                Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
-            this.transform = transform;
-        }
-
-        @Override
-        public PCollectionView<OutputT> expand(PCollection<InputT> input) {
-            PCollection<OutputT> combined =
-                    input.apply(Combine.globally(transform.getCombineFn())
-                            .withoutDefaults()
-                            .withFanout(transform.getFanout()));
-
-            PCollectionView<OutputT> view = PCollectionViews.singletonView(
-                    combined,
-                    combined.getWindowingStrategy(),
-                    transform.getInsertDefault(),
-                    transform.getInsertDefault()
-                            ? transform.getCombineFn().defaultValue() : null,
-                    combined.getCoder());
-            return combined
-                    .apply(ParDo.of(new WrapAsList<OutputT>()))
-                    .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
-        }
-
-        @Override
-        protected String getKindString() {
-            return "StreamingCombineGloballyAsSingletonView";
-        }
-    }
-
-    private static class WrapAsList<T> extends DoFn<T, List<T>> {
-        @ProcessElement
-        public void processElement(ProcessContext c) {
-            c.output(Collections.singletonList(c.element()));
-        }
-    }
-
-    /**
-     * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
-     * They require the input {@link PCollection} fits in memory.
-     * For a large {@link PCollection} this is expected to crash!
-     *
-     * @param <T> the type of elements to concatenate.
-     */
-    private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
-        private static final long serialVersionUID = 1L;
-
-        @Override
-        public List<T> createAccumulator() {
-            return new ArrayList<>();
-        }
-
-        @Override
-        public List<T> addInput(List<T> accumulator, T input) {
-            accumulator.add(input);
-            return accumulator;
-        }
-
-        @Override
-        public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
-            List<T> result = createAccumulator();
-            for (List<T> accumulator : accumulators) {
-                result.addAll(accumulator);
-            }
-            return result;
-        }
-
-        @Override
-        public List<T> extractOutput(List<T> accumulator) {
-            return accumulator;
-        }
-
-        @Override
-        public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
-            return ListCoder.of(inputCoder);
-        }
-
-        @Override
-        public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
-            return ListCoder.of(inputCoder);
-        }
-    }
-
-    /**
-     * Creates a primitive {@link PCollectionView}.
-     *
-     * <p>For internal use only by runner implementors.
-     *
-     * @param <ElemT> The type of the elements of the input PCollection
-     * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
-     */
-    public static class CreateJStormPCollectionView<ElemT, ViewT>
-            extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
-        private PCollectionView<ViewT> view;
-
-        private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
-            this.view = view;
-        }
-
-        public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
-                PCollectionView<ViewT> view) {
-            return new CreateJStormPCollectionView<>(view);
-        }
-
-        @Override
-        public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
-            return view;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowAssignTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowAssignTranslator.java
deleted file mode 100644
index 7fe8ddd..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowAssignTranslator.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.translation.runtime.WindowAssignExecutor;
-import org.apache.beam.sdk.transforms.windowing.Window;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-
-public class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
-
-    @Override
-    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-        context.getUserGraphContext().setWindowed();
-        WindowAssignExecutor executor = new WindowAssignExecutor(
-                description,
-                transform.getWindowFn(),
-                userGraphContext.getOutputTag());
-        context.addTransformExecutor(executor);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowBoundTranslator.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowBoundTranslator.java
deleted file mode 100644
index 0b35052..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/translator/WindowBoundTranslator.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import org.apache.beam.sdk.transforms.windowing.FixedWindows;
-import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
-import org.apache.beam.sdk.transforms.windowing.Window;
-
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import org.apache.beam.sdk.values.PValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Translates a Window.Bound node into a Storm WindowedBolt
- * 
- * @param <T>
- */
-public class WindowBoundTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
-    private static final Logger LOG = LoggerFactory.getLogger(WindowBoundTranslator.class);
-
-    // Do nothing here currently. The assign of window strategy is included in AssignTranslator.
-    @Override
-    public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-        if (transform.getWindowFn() instanceof FixedWindows) {
-            context.getUserGraphContext().setWindowed();
-        } else if (transform.getWindowFn() instanceof SlidingWindows) {
-            context.getUserGraphContext().setWindowed();
-        } else {
-            throw new UnsupportedOperationException("Not supported window type currently: " + transform.getWindowFn());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/CommonInstance.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/CommonInstance.java
deleted file mode 100644
index a75efa9..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/CommonInstance.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.util;
-
-public class CommonInstance {
-    public static final String KEY = "Key";
-    public static final String VALUE = "Value";
-
-    public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultSideInputReader.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultSideInputReader.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultSideInputReader.java
deleted file mode 100644
index 8bf49d8..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultSideInputReader.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.util;
-
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.runners.core.SideInputReader;
-import org.apache.beam.sdk.values.PCollectionView;
-
-import javax.annotation.Nullable;
-import java.io.Serializable;
-
-/**
- * No-op SideInputReader implementation.
- */
-public class DefaultSideInputReader implements SideInputReader, Serializable {
-    @Nullable
-    @Override
-    public <T> T get(PCollectionView<T> pCollectionView, BoundedWindow boundedWindow) {
-        return null;
-    }
-
-    @Override
-    public <T> boolean contains(PCollectionView<T> pCollectionView) {
-        return false;
-    }
-
-    @Override
-    public boolean isEmpty() {
-        return true;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultStepContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultStepContext.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultStepContext.java
deleted file mode 100644
index 08d1f2d..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/util/DefaultStepContext.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.util;
-
-import org.apache.beam.runners.core.ExecutionContext;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Default StepContext for running DoFn This does not allow accessing state or timer internals.
- */
-public class DefaultStepContext implements ExecutionContext.StepContext {
-
-    private TimerInternals timerInternals;
-
-    private StateInternals stateInternals;
-
-    public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
-        this.timerInternals = checkNotNull(timerInternals, "timerInternals");
-        this.stateInternals = checkNotNull(stateInternals, "stateInternals");
-    }
-
-    @Override
-    public String getStepName() {
-        return null;
-    }
-
-    @Override
-    public String getTransformName() {
-        return null;
-    }
-
-    @Override
-    public void noteOutput(WindowedValue<?> windowedValue) {
-
-    }
-
-    @Override
-    public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
-
-    }
-
-    @Override
-    public <T, W extends BoundedWindow> void writePCollectionViewData(TupleTag<?> tag, Iterable<WindowedValue<T>> data,
-            Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder) throws IOException {
-        throw new UnsupportedOperationException("Writing side-input data is not supported.");
-    }
-
-    @Override
-    public StateInternals stateInternals() {
-        return stateInternals;
-    }
-
-    @Override
-    public TimerInternals timerInternals() {
-        return timerInternals;
-    }
-
-    public void setStateInternals(StateInternals stateInternals) {
-        this.stateInternals = stateInternals;
-    }
-
-    public void setTimerInternals(TimerInternals timerInternals) {
-        this.timerInternals = timerInternals;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/RunnerUtils.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/RunnerUtils.java
deleted file mode 100644
index 6cf3ae5..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/RunnerUtils.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.util;
-
-import com.alibaba.jstorm.beam.translation.runtime.Executor;
-
-import com.alibaba.jstorm.beam.translation.runtime.GroupByWindowExecutor;
-import com.alibaba.jstorm.beam.translation.runtime.MultiStatefulDoFnExecutor;
-import com.alibaba.jstorm.beam.translation.runtime.StatefulDoFnExecutor;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-
-public class RunnerUtils {
-    /**
-     * Convert WindowedValue<KV<>> into KeyedWorkItem<K, WindowedValue<V>>
-     * @param elem
-     * @return
-     */
-    public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
-        WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
-        SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
-                kvElem.getValue().getKey(),
-                kvElem.withValue(kvElem.getValue().getValue()));
-        return workItem;
-    }
-
-    public static boolean isGroupByKeyExecutor (Executor executor) {
-        if (executor instanceof GroupByWindowExecutor) {
-            return true;
-        } else if (executor instanceof StatefulDoFnExecutor ||
-                executor instanceof MultiStatefulDoFnExecutor) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SerializedPipelineOptions.java
deleted file mode 100644
index 543db1c..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SerializedPipelineOptions.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.alibaba.jstorm.beam.util;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.beam.sdk.options.PipelineOptions;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Encapsulates the PipelineOptions in serialized form to ship them to the cluster.
- */
-public class SerializedPipelineOptions implements Serializable {
-
-    private final byte[] serializedOptions;
-
-    /** Lazily initialized copy of deserialized options */
-    private transient PipelineOptions pipelineOptions;
-
-    public SerializedPipelineOptions(PipelineOptions options) {
-        checkNotNull(options, "PipelineOptions must not be null.");
-
-        try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
-            new ObjectMapper().writeValue(baos, options);
-            this.serializedOptions = baos.toByteArray();
-        } catch (Exception e) {
-            throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
-        }
-
-    }
-
-    public PipelineOptions getPipelineOptions() {
-        if (pipelineOptions == null) {
-            try {
-                pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
-            } catch (IOException e) {
-                throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
-            }
-        }
-
-        return pipelineOptions;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SingletonKeyedWorkItem.java
deleted file mode 100644
index 2f9b224..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/util/SingletonKeyedWorkItem.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.util;
-
-import java.util.Collections;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.util.WindowedValue;
-
-/**
- * Singleton keyed word item.
- * @param <K>
- * @param <ElemT>
- */
-public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
-
-  final K key;
-  final WindowedValue<ElemT> value;
-
-  private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
-    this.key = key;
-    this.value = value;
-  }
-
-  public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
-      return new SingletonKeyedWorkItem<K, ElemT>(key, value);
-  }
-
-  @Override
-  public K key() {
-    return key;
-  }
-
-  public WindowedValue<ElemT> value() {
-    return value;
-  }
-
-  @Override
-  public Iterable<TimerInternals.TimerData> timersIterable() {
-    return Collections.EMPTY_LIST;
-  }
-
-  @Override
-  public Iterable<WindowedValue<ElemT>> elementsIterable() {
-    return Collections.singletonList(value);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormPipelineOptions.java
new file mode 100644
index 0000000..457beb6
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormPipelineOptions.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm;
+
+import avro.shaded.com.google.common.collect.Maps;
+import org.apache.beam.sdk.options.*;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Options which can be used to configure a JStorm PipelineRunner.
+ */
+public interface StormPipelineOptions extends PipelineOptions, ApplicationNameOptions, StreamingOptions {
+
+    @Description("Indicate if the topology is running on local machine or distributed cluster")
+    @Default.Boolean(false)
+    Boolean getLocalMode();
+    void setLocalMode(Boolean isLocal);
+
+    @Description("Executing time(sec) of topology on local mode. Default is 1min.")
+    @Default.Long(60)
+    Long getLocalModeExecuteTime();
+    void setLocalModeExecuteTime(Long time);
+
+    @Description("Worker number of topology")
+    @Default.Integer(1)
+    Integer getWorkerNumber();
+    void setWorkerNumber(Integer number);
+
+    @Description("Global parallelism number of a component")
+    @Default.Integer(1)
+    Integer getParallelismNumber();
+    void setParallelismNumber(Integer number);
+
+    @Description("System topology config of JStorm")
+    @Default.InstanceFactory(DefaultMapValueFactory.class)
+    Map getTopologyConfig();
+    void setTopologyConfig(Map conf);
+
+    @Description("Indicate if it is an exactly once topology")
+    @Default.Boolean(false)
+    Boolean getExactlyOnceTopology();
+    void setExactlyOnceTopology(Boolean isExactlyOnce);
+
+    @Description("Parallelism number of a specified composite PTransform")
+    @Default.InstanceFactory(DefaultMapValueFactory.class)
+    Map getParallelismNumMap();
+    void setParallelismNumMap(Map parallelismNumMap);
+
+    class DefaultMapValueFactory implements DefaultValueFactory<Map> {
+        @Override
+        public Map create(PipelineOptions pipelineOptions) {
+            return Maps.newHashMap();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRegistrar.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRegistrar.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRegistrar.java
new file mode 100644
index 0000000..12b3c18
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRegistrar.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm;
+
+import com.google.auto.service.AutoService;
+import com.google.common.collect.ImmutableList;
+
+import org.apache.beam.sdk.PipelineRunner;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsRegistrar;
+import org.apache.beam.sdk.runners.PipelineRunnerRegistrar;
+
+public class StormRegistrar {
+    private StormRegistrar() {
+    }
+
+    @AutoService(PipelineRunnerRegistrar.class)
+    public static class Runner implements PipelineRunnerRegistrar {
+        @Override
+        public Iterable<Class<? extends PipelineRunner<?>>> getPipelineRunners() {
+            return ImmutableList.<Class<? extends PipelineRunner<?>>> of(StormRunner.class);
+        }
+    }
+
+    @AutoService(PipelineOptionsRegistrar.class)
+    public static class Options implements PipelineOptionsRegistrar {
+        @Override
+        public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
+            return ImmutableList.<Class<? extends PipelineOptions>> of(StormPipelineOptions.class);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRunner.java
new file mode 100644
index 0000000..8bee49f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/StormRunner.java
@@ -0,0 +1,345 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import backtype.storm.Config;
+import backtype.storm.LocalCluster;
+import backtype.storm.StormSubmitter;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.topology.BoltDeclarer;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.tuple.Fields;
+
+import com.alibaba.jstorm.beam.serialization.*;
+import org.apache.beam.runners.jstorm.serialization.ImmutableListSerializer;
+import org.apache.beam.runners.jstorm.serialization.ImmutableMapSerializer;
+import org.apache.beam.runners.jstorm.serialization.ImmutableSetSerializer;
+import org.apache.beam.runners.jstorm.serialization.KvStoreIterableSerializer;
+import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuListSerializer;
+import org.apache.beam.runners.jstorm.serialization.SdkRepackImmuSetSerializer;
+import org.apache.beam.runners.jstorm.serialization.UnmodifiableCollectionsSerializer;
+import org.apache.beam.runners.jstorm.translation.StormPipelineTranslator;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.runtime.AbstractComponent;
+import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicBolt;
+import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicSpout;
+import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
+import org.apache.beam.runners.jstorm.translation.runtime.TxExecutorsBolt;
+import org.apache.beam.runners.jstorm.translation.runtime.TxUnboundedSourceSpout;
+import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
+import org.apache.beam.runners.jstorm.translation.translator.Stream;
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import com.alibaba.jstorm.cache.KvStoreIterable;
+import com.alibaba.jstorm.cluster.StormConfig;
+import com.alibaba.jstorm.transactional.TransactionTopologyBuilder;
+import com.alibaba.jstorm.utils.JStormUtils;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import java.util.Map;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.PipelineRunner;
+import org.apache.beam.sdk.metrics.MetricResults;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsValidator;
+import org.joda.time.Duration;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Main entry point into the Storm Runner.
+ * 
+ * After reading the user-defined pipeline, Beam will invoke the run() method with a representation of the pipeline.
+ */
+public class StormRunner extends PipelineRunner<StormRunner.StormPipelineResult> {
+    private static final Logger LOG = LoggerFactory.getLogger(StormRunner.class);
+
+    private StormPipelineOptions options;
+
+    public StormRunner(StormPipelineOptions options) {
+        this.options = options;
+    }
+
+    public static StormRunner fromOptions(PipelineOptions options) {
+        StormPipelineOptions pipelineOptions = PipelineOptionsValidator.validate(StormPipelineOptions.class, options);
+        return new StormRunner(pipelineOptions);
+    }
+
+    /**
+     * convert pipeline options to storm configuration format
+     * @param options
+     * @return
+     */
+    private Config convertPipelineOptionsToConfig(StormPipelineOptions options) {
+        Config config = new Config();
+        if (options.getLocalMode())
+            config.put(Config.STORM_CLUSTER_MODE, "local");
+        else
+            config.put(Config.STORM_CLUSTER_MODE, "distributed");
+        
+        Config.setNumWorkers(config, options.getWorkerNumber());
+
+        config.putAll(options.getTopologyConfig());
+
+        // Setup config for runtime env
+        config.put("worker.external", "beam");
+        config.put("topology.acker.executors", 0);
+
+        UnmodifiableCollectionsSerializer.registerSerializers(config);
+        // register classes of guava utils, ImmutableList, ImmutableSet, ImmutableMap
+        ImmutableListSerializer.registerSerializers(config);
+        SdkRepackImmuListSerializer.registerSerializers(config);
+        ImmutableSetSerializer.registerSerializers(config);
+        SdkRepackImmuSetSerializer.registerSerializers(config);
+        ImmutableMapSerializer.registerSerializers(config);
+
+        config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
+        return config;
+    }
+
+    @Override
+    public StormPipelineResult run(Pipeline pipeline) {
+        LOG.info("Running pipeline...");
+        TranslationContext context = new TranslationContext(this.options);
+        StormPipelineTranslator transformer = new StormPipelineTranslator(context);
+        transformer.translate(pipeline);
+        LOG.info("UserGraphContext=\n{}", context.getUserGraphContext());
+        LOG.info("ExecutionGraphContext=\n{}", context.getExecutionGraphContext());
+
+        for (Stream stream : context.getExecutionGraphContext().getStreams()) {
+            LOG.info(stream.getProducer().getComponentId() + " --> " + stream.getConsumer().getComponentId());
+        }
+
+        String topologyName = options.getJobName();
+        Config config = convertPipelineOptionsToConfig(options);
+
+        return runTopology(
+                topologyName,
+                getTopology(options, context.getExecutionGraphContext()),
+                config);
+    }
+
+    private StormPipelineResult runTopology(String topologyName, StormTopology topology, Config config) {
+        try {
+            if (StormConfig.local_mode(config)) {
+                LocalCluster localCluster = LocalCluster.getInstance();
+                localCluster.submitTopology(topologyName, config, topology);
+                return new LocalStormPipelineResult(
+                        topologyName, config, localCluster, options.getLocalModeExecuteTime());
+            } else {
+                StormSubmitter.submitTopology(topologyName, config, topology);
+                return null;
+            }
+        } catch (Exception e) {
+            LOG.warn("Fail to submit topology", e);
+            throw new RuntimeException("Fail to submit topology", e);
+        }
+    }
+
+    public static abstract class StormPipelineResult implements PipelineResult {
+
+        private final String topologyName;
+        private final Config config;
+
+        StormPipelineResult(String topologyName, Config config) {
+            this.config = checkNotNull(config, "config");
+            this.topologyName = checkNotNull(topologyName, "topologyName");
+        }
+
+        public State getState() {
+            return null;
+        }
+
+        public Config getConfig() {
+            return config;
+        }
+
+        public String getTopologyName() {
+            return topologyName;
+        }
+    }
+
+    public static class LocalStormPipelineResult extends StormPipelineResult {
+
+        private LocalCluster localCluster;
+        private long localModeExecuteTimeSecs;
+
+        LocalStormPipelineResult(
+                String topologyName,
+                Config config,
+                LocalCluster localCluster,
+                long localModeExecuteTimeSecs) {
+            super(topologyName, config);
+            this.localCluster = checkNotNull(localCluster, "localCluster");
+        }
+
+        @Override
+        public State cancel() throws IOException {
+            //localCluster.deactivate(getTopologyName());
+            localCluster.killTopology(getTopologyName());
+            localCluster.shutdown();
+            JStormUtils.sleepMs(1000);
+            return State.CANCELLED;
+        }
+
+        @Override
+        public State waitUntilFinish(Duration duration) {
+            return waitUntilFinish();
+        }
+
+        @Override
+        public State waitUntilFinish() {
+            JStormUtils.sleepMs(localModeExecuteTimeSecs * 1000);
+            try {
+                return cancel();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        @Override
+        public MetricResults metrics() {
+            return null;
+        }
+    }
+
+    private AbstractComponent getComponent(String id, TranslationContext.ExecutionGraphContext context) {
+        AbstractComponent component = null;
+        AdaptorBasicSpout spout = context.getSpout(id);
+        if (spout != null) {
+            component = spout;
+        } else {
+            AdaptorBasicBolt bolt = context.getBolt(id);
+            if (bolt != null)
+                component = bolt;
+        }
+
+        return component;
+    }
+
+    private StormTopology getTopology(StormPipelineOptions options, TranslationContext.ExecutionGraphContext context) {
+        boolean isExactlyOnce = options.getExactlyOnceTopology();
+        TopologyBuilder builder = isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
+
+        int parallelismNumber = options.getParallelismNumber();
+        Map<String, AdaptorBasicSpout> spouts = context.getSpouts();
+        for (String id : spouts.keySet()) {
+            IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
+            builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));
+        }
+
+        HashMap<String, BoltDeclarer> declarers = new HashMap<>();
+        Iterable<Stream> streams = context.getStreams();
+        LOG.info("streams=" + streams);
+        for (Stream stream : streams) {
+            String destBoltId = stream.getConsumer().getComponentId();
+            IRichBolt bolt = getBolt(isExactlyOnce, context.getBolt(destBoltId));
+            BoltDeclarer declarer = declarers.get(destBoltId);
+            if (declarer == null) {
+                declarer = builder.setBolt(destBoltId, bolt,
+                        getParallelismNum(context.getBolt(destBoltId), parallelismNumber));
+                declarers.put(destBoltId, declarer);
+            }
+
+            Stream.Grouping grouping = stream.getConsumer().getGrouping();
+            String streamId = stream.getProducer().getStreamId();
+            String srcBoltId = stream.getProducer().getComponentId();
+
+            // add stream output declare for "from" component
+            AbstractComponent component = getComponent(srcBoltId, context);
+            if (grouping.getType().equals(Stream.Grouping.Type.FIELDS))
+                component.addKVOutputField(streamId);
+            else
+                component.addOutputField(streamId);
+
+            // "to" component declares grouping to "from" component 
+            switch (grouping.getType()) {
+            case SHUFFLE:
+                declarer.shuffleGrouping(srcBoltId, streamId);
+                break;
+            case FIELDS:
+                declarer.fieldsGrouping(srcBoltId, streamId, new Fields(grouping.getFields()));
+                break;
+            case ALL:
+                declarer.allGrouping(srcBoltId, streamId);
+                break;
+            case DIRECT:
+                declarer.directGrouping(srcBoltId, streamId);
+                break;
+            case GLOBAL:
+                declarer.globalGrouping(srcBoltId, streamId);
+                break;
+            case LOCAL_OR_SHUFFLE:
+                declarer.localOrShuffleGrouping(srcBoltId, streamId);
+                break;
+            case NONE:
+                declarer.noneGrouping(srcBoltId, streamId);
+                break;
+            default:
+                throw new UnsupportedOperationException("unsupported grouping type: " + grouping);
+            }
+
+            // Subscribe grouping of water mark stream
+            component.addOutputField(CommonInstance.BEAM_WATERMARK_STREAM_ID);
+            declarer.allGrouping(srcBoltId, CommonInstance.BEAM_WATERMARK_STREAM_ID);
+        }
+
+        if (isExactlyOnce) {
+            ((TransactionTopologyBuilder) builder).enableHdfs();
+        }
+        return builder.createTopology();
+    }
+
+    private IRichSpout getSpout(boolean isExactlyOnce, IRichSpout spout) {
+        IRichSpout ret = null;
+        if (isExactlyOnce) {
+            if (spout instanceof UnboundedSourceSpout) {
+                ret = new TxUnboundedSourceSpout((UnboundedSourceSpout) spout);
+            } else {
+                String error = String.format("The specified type(%s) is not supported in exactly once mode yet!", spout.getClass().toString());
+                throw new RuntimeException(error);
+            }
+        } else {
+            ret = spout;
+        }
+        return ret;
+    }
+
+    private IRichBolt getBolt(boolean isExactlyOnce, ExecutorsBolt bolt) {
+        return isExactlyOnce ? new TxExecutorsBolt(bolt) : bolt;
+    }
+
+    /**
+     * Calculate the final parallelism number according to the configured number and global number.
+     * @param component
+     * @param globalParallelismNum
+     * @return final parallelism number for the specified component
+     */
+    private int getParallelismNum(AbstractComponent component, int globalParallelismNum) {
+        int configParallelismNum = component.getParallelismNum();
+        return configParallelismNum > 0 ? configParallelismNum : globalParallelismNum;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
new file mode 100644
index 0000000..fa7bdf3
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -0,0 +1,120 @@
+package org.apache.beam.runners.jstorm;
+
+import avro.shaded.com.google.common.collect.Maps;
+import com.alibaba.jstorm.common.metric.AsmMetric;
+import com.alibaba.jstorm.metric.*;
+import com.alibaba.jstorm.utils.JStormUtils;
+import com.google.common.base.Optional;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.PipelineRunner;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.testing.PAssert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Test JStorm runner.
+ */
+public class TestJStormRunner extends PipelineRunner<StormRunner.StormPipelineResult> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TestJStormRunner.class);
+
+    public static TestJStormRunner fromOptions(PipelineOptions options) {
+        return new TestJStormRunner(options.as(StormPipelineOptions.class));
+    }
+
+    private final StormRunner stormRunner;
+    private final StormPipelineOptions options;
+
+    private TestJStormRunner(StormPipelineOptions options) {
+        this.options = options;
+        Map conf = Maps.newHashMap();
+        //conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
+        options.setTopologyConfig(conf);
+        options.setLocalMode(true);
+        stormRunner = StormRunner.fromOptions(checkNotNull(options, "options"));
+    }
+
+    @Override
+    public StormRunner.StormPipelineResult run(Pipeline pipeline) {
+        StormRunner.StormPipelineResult result = stormRunner.run(pipeline);
+
+        try {
+            int numberOfAssertions = PAssert.countAsserts(pipeline);
+
+            LOG.info("Running JStorm job {} with {} expected assertions.", result.getTopologyName(), numberOfAssertions);
+            if(numberOfAssertions == 0) {
+                // If assert number is zero, wait 5 sec
+                JStormUtils.sleepMs(5000);
+                return result;
+            } else {
+                for (int i = 0; i < 40; ++i) {
+                    Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
+                    if (success.isPresent() && success.get()) {
+                        return result;
+                    } else if (success.isPresent() && !success.get()) {
+                        throw new AssertionError("Failed assertion checks.");
+                    } else {
+                        JStormUtils.sleepMs(500);
+                    }
+                }
+                LOG.info("Assertion checks timed out.");
+                throw new AssertionError("Assertion checks timed out.");
+            }
+        } finally {
+            clearPAssertCount();
+            cancel(result);
+        }
+    }
+
+    private Optional<Boolean> checkForPAssertSuccess(int expectedNumberOfAssertions) {
+        int successes = 0;
+        for (AsmMetric metric : JStormMetrics.search(PAssert.SUCCESS_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
+            successes += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
+        }
+        int failures = 0;
+        for (AsmMetric metric : JStormMetrics.search(PAssert.FAILURE_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
+            failures += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
+        }
+
+        if (failures > 0) {
+            LOG.info("Found {} success, {} failures out of {} expected assertions.",
+                    successes, failures, expectedNumberOfAssertions);
+            return Optional.of(false);
+        } else if (successes >= expectedNumberOfAssertions) {
+            LOG.info("Found {} success, {} failures out of {} expected assertions.",
+                    successes, failures, expectedNumberOfAssertions);
+            return Optional.of(true);
+        }
+
+        LOG.info("Found {} success, {} failures out of {} expected assertions.",
+                successes, failures, expectedNumberOfAssertions);
+        return Optional.absent();
+    }
+
+    private void clearPAssertCount() {
+        String topologyName = options.getJobName();
+        AsmMetricRegistry taskMetrics = JStormMetrics.getTaskMetrics();
+        Iterator<Map.Entry<String, AsmMetric>> itr = taskMetrics.getMetrics().entrySet().iterator();
+        while (itr.hasNext()) {
+            Map.Entry<String, AsmMetric> metric = itr.next();
+            if (metric.getKey().contains(topologyName)) {
+                itr.remove();
+            }
+        }
+    }
+
+    private void cancel(StormRunner.StormPipelineResult result) {
+        try {
+            result.cancel();
+        } catch (IOException e) {
+            throw new RuntimeException("Failed to cancel.", e);
+}
+    }
+}


[05/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
new file mode 100644
index 0000000..aa7d325
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
@@ -0,0 +1,92 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.*;
+
+public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableListSerializer() {
+        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+        output.writeInt(object.size(), true);
+        for (Object elm : object) {
+            kryo.writeClassAndObject(output, elm);
+        }
+    }
+
+    @Override
+    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+        final int size = input.readInt(true);
+        final Object[] list = new Object[size];
+        for (int i = 0; i < size; ++i) {
+            list[i] = kryo.readClassAndObject(input);
+        }
+        return ImmutableList.copyOf(list);
+    }
+
+    /**
+     * Creates a new {@link ImmutableListSerializer} and registers its serializer
+     * for the several ImmutableList related classes.
+     */
+    public static void registerSerializers(Config config) {
+
+        // ImmutableList (abstract class)
+        //  +- RegularImmutableList
+        //  |   RegularImmutableList
+        //  +- SingletonImmutableList
+        //  |   Optimized for List with only 1 element.
+        //  +- SubList
+        //  |   Representation for part of ImmutableList
+        //  +- ReverseImmutableList
+        //  |   For iterating in reverse order
+        //  +- StringAsImmutableList
+        //  |   Used by Lists#charactersOf
+        //  +- Values (ImmutableTable values)
+        //      Used by return value of #values() when there are multiple cells
+
+        config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(ImmutableList.class), ImmutableListSerializer.class);
+
+        // Note:
+        //  Only registering above is good enough for serializing/deserializing.
+        //  but if using Kryo#copy, following is required.
+
+        config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().getClass()), ImmutableListSerializer.class);
+        config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1).getClass()), ImmutableListSerializer.class);
+        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1,2,3).subList(1, 2).getClass()), ImmutableListSerializer.class);
+        config.registerSerialization(ImmutableList.of().reverse().getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().reverse().getClass()), ImmutableListSerializer.class);
+
+        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(Lists.charactersOf("KryoRocks").getClass()), ImmutableListSerializer.class);
+
+        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
+        baseTable.put(1, 2, 3);
+        baseTable.put(4, 5, 6);
+        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+        config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
+        config.registerSerialization(
+                RunnerUtils.getBeamSdkRepackClass(table.values().getClass()), ImmutableListSerializer.class);
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
new file mode 100644
index 0000000..ee8b765
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
@@ -0,0 +1,61 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Kryo {@link Serializer} for Guava {@link ImmutableMap} instances. The map is
+ * written as a plain {@link HashMap} copy and rebuilt with
+ * {@link ImmutableMap#copyOf} on read.
+ */
+public class ImmutableMapSerializer extends Serializer<ImmutableMap<Object, ? extends Object>> {
+
+    // Passed to Kryo as the "acceptsNull" constructor flag. Must be false:
+    // write() immediately dereferences the map (Maps.newHashMap would throw
+    // NPE on a null reference), so Kryo has to handle null references itself.
+    // The other immutable-collection serializers in this package also pass false.
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableMapSerializer() {
+        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
+        // Serialize a mutable copy; HashMap has a default Kryo serializer.
+        kryo.writeObject(output, Maps.newHashMap(immutableMap));
+    }
+
+    @Override
+    public ImmutableMap<Object, Object> read(Kryo kryo, Input input, Class<ImmutableMap<Object, ? extends Object>> type) {
+        Map<?, ?> map = kryo.readObject(input, HashMap.class);
+        return ImmutableMap.copyOf(map);
+    }
+
+    /**
+     * Registers this serializer for {@link ImmutableMap} and the concrete
+     * implementation classes Guava may produce at runtime (empty, singleton,
+     * regular and enum-keyed maps).
+     */
+    public static void registerSerializers(Config config) {
+
+        config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
+        config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
+
+        Object o1 = new Object();
+        Object o2 = new Object();
+
+        config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
+        config.registerSerialization(ImmutableMap.of(o1, o1, o2, o2).getClass(), ImmutableMapSerializer.class);
+        // ImmutableMap.copyOf of an EnumMap yields a distinct enum-optimized class.
+        Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
+        for (DummyEnum e : DummyEnum.values()) {
+            enumMap.put(e, o1);
+        }
+
+        config.registerSerialization(ImmutableMap.copyOf(enumMap).getClass(), ImmutableMapSerializer.class);
+    }
+
+    private enum DummyEnum {
+        VALUE1,
+        VALUE2
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
new file mode 100644
index 0000000..cdc4382
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
@@ -0,0 +1,71 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
+
+public class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public ImmutableSetSerializer() {
+        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+        output.writeInt(object.size(), true);
+        for (Object elm : object) {
+            kryo.writeClassAndObject(output, elm);
+        }
+    }
+
+    @Override
+    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+        final int size = input.readInt(true);
+        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+        for (int i = 0; i < size; ++i) {
+            builder.add(kryo.readClassAndObject(input));
+        }
+        return builder.build();
+    }
+
+    /**
+     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+     * for the several ImmutableSet related classes.
+     */
+    public static void registerSerializers(Config config) {
+
+        // ImmutableList (abstract class)
+        //  +- EmptyImmutableSet
+        //  |   EmptyImmutableSet
+        //  +- SingletonImmutableSet
+        //  |   Optimized for Set with only 1 element.
+        //  +- RegularImmutableSet
+        //  |   RegularImmutableList
+        //  +- EnumImmutableSet
+        //  |   EnumImmutableSet
+
+        config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
+
+        // Note:
+        //  Only registering above is good enough for serializing/deserializing.
+        //  but if using Kryo#copy, following is required.
+
+        config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
+        config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
+        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), ImmutableSetSerializer.class);
+
+        config.registerSerialization(
+                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), ImmutableSetSerializer.class);
+    }
+
+    private enum SomeEnum {
+        A, B, C
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
new file mode 100644
index 0000000..decfb3f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
@@ -0,0 +1,55 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import com.alibaba.jstorm.cache.KvStoreIterable;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import com.google.common.collect.Lists;
+
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Kryo {@link Serializer} for JStorm {@link KvStoreIterable}s. The iterable is
+ * materialized into a list on write and deserialized back into an in-memory
+ * {@link KvStoreIterable} view on read.
+ */
+public class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
+
+    public KvStoreIterableSerializer() {
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
+        // Snapshot the iterable exactly once. The previous code iterated the
+        // KV-store-backed iterable a second time after computing the size; if
+        // re-iteration yields different contents, the written count would
+        // disagree with the elements written and corrupt the stream.
+        List<Object> values = Lists.newArrayList(object);
+        output.writeInt(values.size(), true);
+        for (Object elm : values) {
+            kryo.writeClassAndObject(output, elm);
+        }
+    }
+
+    @Override
+    public KvStoreIterable<Object> read(Kryo kryo, Input input, Class<KvStoreIterable<Object>> type) {
+        final int size = input.readInt(true);
+        final List<Object> values = Lists.newArrayList();
+        for (int i = 0; i < size; ++i) {
+            values.add(kryo.readClassAndObject(input));
+        }
+
+        // Lightweight in-memory view satisfying the KvStoreIterable interface;
+        // capturing the final local avoids the raw-typed init() trick.
+        return new KvStoreIterable<Object>() {
+            @Override
+            public Iterator<Object> iterator() {
+                return values.iterator();
+            }
+
+            @Override
+            public String toString() {
+                return values.toString();
+            }
+        };
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
new file mode 100644
index 0000000..9bb315b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
@@ -0,0 +1,78 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
+
+public class SdkRepackImmuListSerializer extends Serializer<ImmutableList<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public SdkRepackImmuListSerializer() {
+        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+        output.writeInt(object.size(), true);
+        for (Object elm : object) {
+            kryo.writeClassAndObject(output, elm);
+        }
+    }
+
+    @Override
+    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+        final int size = input.readInt(true);
+        final Object[] list = new Object[size];
+        for (int i = 0; i < size; ++i) {
+            list[i] = kryo.readClassAndObject(input);
+        }
+        return ImmutableList.copyOf(list);
+    }
+
+    /**
+     * Creates a new {@link ImmutableListSerializer} and registers its serializer
+     * for the several ImmutableList related classes.
+     */
+    public static void registerSerializers(Config config) {
+
+        // ImmutableList (abstract class)
+        //  +- RegularImmutableList
+        //  |   RegularImmutableList
+        //  +- SingletonImmutableList
+        //  |   Optimized for List with only 1 element.
+        //  +- SubList
+        //  |   Representation for part of ImmutableList
+        //  +- ReverseImmutableList
+        //  |   For iterating in reverse order
+        //  +- StringAsImmutableList
+        //  |   Used by Lists#charactersOf
+        //  +- Values (ImmutableTable values)
+        //      Used by return value of #values() when there are multiple cells
+
+        config.registerSerialization(ImmutableList.class, SdkRepackImmuListSerializer.class);
+
+        // Note:
+        //  Only registering above is good enough for serializing/deserializing.
+        //  but if using Kryo#copy, following is required.
+
+        config.registerSerialization(ImmutableList.of().getClass(), SdkRepackImmuListSerializer.class);
+        config.registerSerialization(ImmutableList.of(1).getClass(), SdkRepackImmuListSerializer.class);
+        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), SdkRepackImmuListSerializer.class);
+        config.registerSerialization(ImmutableList.of().reverse().getClass(), SdkRepackImmuListSerializer.class);
+
+        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), SdkRepackImmuListSerializer.class);
+
+        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
+        baseTable.put(1, 2, 3);
+        baseTable.put(4, 5, 6);
+        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+        config.registerSerialization(table.values().getClass(), SdkRepackImmuListSerializer.class);
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
new file mode 100644
index 0000000..a514645
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
@@ -0,0 +1,71 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
+
+public class SdkRepackImmuSetSerializer extends Serializer<ImmutableSet<Object>> {
+
+    private static final boolean DOES_NOT_ACCEPT_NULL = false;
+    private static final boolean IMMUTABLE = true;
+
+    public SdkRepackImmuSetSerializer() {
+        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+    }
+
+    @Override
+    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+        output.writeInt(object.size(), true);
+        for (Object elm : object) {
+            kryo.writeClassAndObject(output, elm);
+        }
+    }
+
+    @Override
+    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+        final int size = input.readInt(true);
+        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+        for (int i = 0; i < size; ++i) {
+            builder.add(kryo.readClassAndObject(input));
+        }
+        return builder.build();
+    }
+
+    /**
+     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+     * for the several ImmutableSet related classes.
+     */
+    public static void registerSerializers(Config config) {
+
+        // ImmutableList (abstract class)
+        //  +- EmptyImmutableSet
+        //  |   EmptyImmutableSet
+        //  +- SingletonImmutableSet
+        //  |   Optimized for Set with only 1 element.
+        //  +- RegularImmutableSet
+        //  |   RegularImmutableList
+        //  +- EnumImmutableSet
+        //  |   EnumImmutableSet
+
+        config.registerSerialization(ImmutableSet.class, SdkRepackImmuSetSerializer.class);
+
+        // Note:
+        //  Only registering above is good enough for serializing/deserializing.
+        //  but if using Kryo#copy, following is required.
+
+        config.registerSerialization(ImmutableSet.of().getClass(), SdkRepackImmuSetSerializer.class);
+        config.registerSerialization(ImmutableSet.of(1).getClass(), SdkRepackImmuSetSerializer.class);
+        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), SdkRepackImmuSetSerializer.class);
+
+        config.registerSerialization(
+                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), SdkRepackImmuSetSerializer.class);
+    }
+
+    private enum SomeEnum {
+        A, B, C
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
new file mode 100644
index 0000000..c8b0138
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
@@ -0,0 +1,159 @@
+package org.apache.beam.runners.jstorm.serialization;
+
+import backtype.storm.Config;
+import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
+import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
+import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
+
+import java.lang.reflect.Field;
+import java.util.*;
+
+/**
+ * Kryo {@link Serializer} for the unmodifiable wrapper collections created via
+ * {@link Collections} (unmodifiableCollection, -List, -Set, -SortedSet, -Map,
+ * -SortedMap). The wrapper kind is written as an enum ordinal followed by the
+ * backing collection, which is extracted reflectively.
+ */
+public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
+
+    // Reflective handles to the private backing fields of the JDK wrappers:
+    // "c" in Collections$UnmodifiableCollection, "m" in Collections$UnmodifiableMap.
+    private static final Field SOURCE_COLLECTION_FIELD;
+    private static final Field SOURCE_MAP_FIELD;
+
+    static {
+        try {
+            SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection" )
+                    .getDeclaredField( "c" );
+            SOURCE_COLLECTION_FIELD.setAccessible( true );
+
+
+            SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap" )
+                    .getDeclaredField( "m" );
+            SOURCE_MAP_FIELD.setAccessible( true );
+        } catch ( final Exception e ) {
+            // Fail fast at class-load time if the JDK internals are inaccessible.
+            throw new RuntimeException( "Could not access source collection" +
+                    " field in java.util.Collections$UnmodifiableCollection.", e );
+        }
+    }
+
+    @Override
+    public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
+        // The wrapper kind was written as a variable-length int ordinal, so
+        // reordering the enum constants would break wire compatibility.
+        final int ordinal = input.readInt( true );
+        final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.values()[ordinal];
+        final Object sourceCollection = kryo.readClassAndObject( input );
+        // Re-wrap the deserialized backing collection in the matching wrapper.
+        return unmodifiableCollection.create( sourceCollection );
+    }
+
+    @Override
+    public void write(final Kryo kryo, final Output output, final Object object) {
+        try {
+            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( object.getClass() );
+            // the ordinal could be replaced by s.th. else (e.g. a explicitely managed "id")
+            output.writeInt( unmodifiableCollection.ordinal(), true );
+            kryo.writeClassAndObject( output, unmodifiableCollection.sourceCollectionField.get( object ) );
+        } catch ( final RuntimeException e ) {
+            // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
+            // handles SerializationException specifically (resizing the buffer)...
+            throw e;
+        } catch ( final Exception e ) {
+            // Reflective access failure (IllegalAccessException) — wrap as unchecked.
+            throw new RuntimeException( e );
+        }
+    }
+
+    @Override
+    public Object copy(Kryo kryo, Object original) {
+        try {
+            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( original.getClass() );
+            // Deep-copy the backing collection, then re-apply the same wrapper.
+            Object sourceCollectionCopy = kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
+            return unmodifiableCollection.create( sourceCollectionCopy );
+        } catch ( final RuntimeException e ) {
+            // Don't eat and wrap RuntimeExceptions
+            throw e;
+        } catch ( final Exception e ) {
+            throw new RuntimeException( e );
+        }
+    }
+
+    // One constant per JDK wrapper type. Each knows the concrete wrapper class
+    // (obtained from a representative instance, since the classes are private)
+    // and which reflective field holds the backing collection.
+    private static enum UnmodifiableCollection {
+        COLLECTION( Collections.unmodifiableCollection( Arrays.asList( "" ) ).getClass(), SOURCE_COLLECTION_FIELD ){
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableCollection( (Collection<?>) sourceCollection );
+            }
+        },
+        RANDOM_ACCESS_LIST( Collections.unmodifiableList( new ArrayList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableList( (List<?>) sourceCollection );
+            }
+        },
+        LIST( Collections.unmodifiableList( new LinkedList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableList( (List<?>) sourceCollection );
+            }
+        },
+        SET( Collections.unmodifiableSet( new HashSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableSet( (Set<?>) sourceCollection );
+            }
+        },
+        SORTED_SET( Collections.unmodifiableSortedSet( new TreeSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableSortedSet( (SortedSet<?>) sourceCollection );
+            }
+        },
+        MAP( Collections.unmodifiableMap( new HashMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
+
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableMap( (Map<?, ?>) sourceCollection );
+            }
+
+        },
+        SORTED_MAP( Collections.unmodifiableSortedMap( new TreeMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
+            @Override
+            public Object create( final Object sourceCollection ) {
+                return Collections.unmodifiableSortedMap( (SortedMap<?, ?>) sourceCollection );
+            }
+        };
+
+        private final Class<?> type;
+        private final Field sourceCollectionField;
+
+        private UnmodifiableCollection( final Class<?> type, final Field sourceCollectionField ) {
+            this.type = type;
+            this.sourceCollectionField = sourceCollectionField;
+        }
+
+        /**
+         * Re-wraps the given backing collection in this unmodifiable wrapper type.
+         *
+         * @param sourceCollection the deserialized backing collection or map
+         */
+        public abstract Object create( Object sourceCollection );
+
+        // Maps a concrete wrapper class back to its enum constant; used on write/copy.
+        static UnmodifiableCollection valueOfType(final Class<?> type ) {
+            for( final UnmodifiableCollection item : values() ) {
+                if ( item.type.equals( type ) ) {
+                    return item;
+                }
+            }
+            throw new IllegalArgumentException( "The type " + type + " is not supported." );
+        }
+
+    }
+
+    /**
+     * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
+     * for the several unmodifiable Collections that can be created via {@link Collections},
+     * including {@link Map}s.
+     *
+     * @see Collections#unmodifiableCollection(Collection)
+     * @see Collections#unmodifiableList(List)
+     * @see Collections#unmodifiableSet(Set)
+     * @see Collections#unmodifiableSortedSet(SortedSet)
+     * @see Collections#unmodifiableMap(Map)
+     * @see Collections#unmodifiableSortedMap(SortedMap)
+     */
+    public static void registerSerializers( Config config ) {
+        // NOTE(review): this standalone values() call is redundant — the loop
+        // below invokes it again; it can be removed safely.
+        UnmodifiableCollection.values();
+        for ( final UnmodifiableCollection item : UnmodifiableCollection.values() ) {
+            config.registerSerialization( item.type, UnmodifiableCollectionsSerializer.class );
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
new file mode 100644
index 0000000..d907fac
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import org.apache.beam.runners.core.construction.PTransformMatchers;
+import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.runners.PTransformOverride;
+import org.apache.beam.sdk.runners.TransformHierarchy;
+import org.apache.beam.sdk.runners.AppliedPTransform;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.util.InstanceBuilder;
+import org.apache.beam.sdk.values.PValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
+
+import java.util.List;
+
+/**
+ * Pipeline translator of Storm: visits the Beam pipeline graph and translates each supported
+ * (composite or primitive) transform into the corresponding Storm topology components.
+ */
+public class StormPipelineTranslator extends Pipeline.PipelineVisitor.Defaults {
+    private static final Logger LOG = LoggerFactory.getLogger(StormPipelineTranslator.class);
+
+    // Shared translation state (user graph and execution graph views).
+    private TranslationContext context;
+    // Current depth in the transform hierarchy; used only for log indentation.
+    private int depth = 0;
+
+    public StormPipelineTranslator(TranslationContext context) {
+        this.context = context;
+    }
+
+    /**
+     * Translates the given pipeline: first replaces the View/Combine transforms with
+     * runner-specific overrides, then traverses the transform hierarchy topologically
+     * with this visitor.
+     *
+     * @param pipeline the pipeline to translate
+     */
+    public void translate(Pipeline pipeline) {
+        List<PTransformOverride> transformOverrides =
+                ImmutableList.<PTransformOverride>builder()
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
+                        .add(PTransformOverride.of(PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
+                                new ReflectiveOneToOneOverrideFactory(ViewTranslator.CombineGloballyAsSingletonView.class)))
+                        .build();
+        pipeline.replaceAll(transformOverrides);
+        pipeline.traverseTopologically(this);
+    }
+
+    @Override
+    public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
+        LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
+        this.depth++;
+
+        // Check if the current composite transform needs to be translated as a whole.
+        // If not, all sub transforms will be translated in visitPrimitiveTransform.
+        PTransform<?, ?> transform = node.getTransform();
+        if (transform != null) {
+            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+
+            if (translator != null && applyCanTranslate(transform, node, translator)) {
+                applyStreamingTransform(transform, node, translator);
+                LOG.info(genSpaces(this.depth) + "translated-" + node);
+                return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
+            }
+        }
+        return CompositeBehavior.ENTER_TRANSFORM;
+    }
+
+    @Override
+    public void leaveCompositeTransform(TransformHierarchy.Node node) {
+        this.depth--;
+        LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
+    }
+
+    @Override
+    public void visitPrimitiveTransform(TransformHierarchy.Node node) {
+        LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
+
+        if (!node.isRootNode()) {
+            PTransform<?, ?> transform = node.getTransform();
+            TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+            if (translator == null || !applyCanTranslate(transform, node, translator)) {
+                LOG.info(node.getTransform().getClass().toString());
+                throw new UnsupportedOperationException("The transform " + transform + " is currently not supported.");
+            }
+            applyStreamingTransform(transform, node, translator);
+        }
+    }
+
+    @Override
+    public void visitValue(PValue value, TransformHierarchy.Node node) {
+        LOG.info(genSpaces(this.depth) + "visiting value {}", value);
+    }
+
+    // Translates a single transform with the given translator, then records the mapping from
+    // output PValues to TupleTags for later side-input translation.
+    private <T extends PTransform<?, ?>> void applyStreamingTransform(PTransform<?, ?> transform, TransformHierarchy.Node node,
+            TransformTranslator<?> translator) {
+        @SuppressWarnings("unchecked")
+        T typedTransform = (T) transform;
+        @SuppressWarnings("unchecked")
+        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+        typedTranslator.translateNode(typedTransform, context);
+
+        // Maintain PValue to TupleTag map for side inputs translation.
+        context.getUserGraphContext().recordOutputTaggedPValue();
+    }
+
+    // Checks whether the given translator accepts this transform in the current context.
+    private <T extends PTransform<?, ?>> boolean applyCanTranslate(PTransform<?, ?> transform, TransformHierarchy.Node node, TransformTranslator<?> translator) {
+        @SuppressWarnings("unchecked")
+        T typedTransform = (T) transform;
+        @SuppressWarnings("unchecked")
+        TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+        context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+
+        return typedTranslator.canTranslate(typedTransform, context);
+    }
+
+    /**
+     * Utility formatting method.
+     *
+     * @param n number of nesting levels to render
+     * @return String consisting of "|" followed by three spaces, repeated n times
+     */
+    protected static String genSpaces(int n) {
+        StringBuilder builder = new StringBuilder();
+        for (int i = 0; i < n; i++) {
+            builder.append("|   ");
+        }
+        return builder.toString();
+    }
+
+    /**
+     * Override factory that replaces a matched transform with an instance of the given
+     * replacement class, constructed reflectively from the original transform.
+     */
+    private static class ReflectiveOneToOneOverrideFactory<
+            InputT extends PValue,
+            OutputT extends PValue,
+            TransformT extends PTransform<InputT, OutputT>>
+            extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
+        private final Class<PTransform<InputT, OutputT>> replacement;
+
+        private ReflectiveOneToOneOverrideFactory(
+                Class<PTransform<InputT, OutputT>> replacement) {
+            this.replacement = replacement;
+        }
+
+        @Override
+        @SuppressWarnings("unchecked")
+        public PTransformReplacement<InputT, OutputT> getReplacementTransform(AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
+            PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
+            PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
+                    .withArg((Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(), originalPTransform)
+                    .build();
+            InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
+            return PTransformReplacement.of(inputT, replacedPTransform);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
new file mode 100644
index 0000000..bf4515f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -0,0 +1,424 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import avro.shaded.com.google.common.collect.Lists;
+import org.apache.beam.runners.jstorm.translation.translator.Stream;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
+import com.google.common.base.Strings;
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.runtime.AdaptorBasicSpout;
+import org.apache.beam.runners.jstorm.translation.runtime.Executor;
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import org.apache.beam.sdk.runners.AppliedPTransform;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.PValueBase;
+import org.apache.beam.sdk.values.TaggedPValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.beam.runners.jstorm.translation.runtime.ExecutorsBolt;
+
+import java.util.*;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+/**
+ * Maintains the state necessary during Pipeline translation to build a Storm topology.
+ */
+public class TranslationContext {
+    private static final Logger LOG = LoggerFactory.getLogger(TranslationContext.class);
+
+    // View of the user-defined Beam graph (current transform, its inputs/outputs and tags).
+    private final UserGraphContext userGraphContext;
+    // View of the Storm execution graph being built (spouts, bolts and streams).
+    private final ExecutionGraphContext executionGraphContext;
+
+    public TranslationContext(StormPipelineOptions options) {
+        this.userGraphContext = new UserGraphContext(options);
+        this.executionGraphContext = new ExecutionGraphContext();
+    }
+
+    public ExecutionGraphContext getExecutionGraphContext() {
+        return executionGraphContext;
+    }
+
+    public UserGraphContext getUserGraphContext() {
+        return userGraphContext;
+    }
+
+    // Wires the producer of the given input to the destination component with the given
+    // grouping, unless producer and consumer are hosted by the same component.
+    private void addStormStreamDef(TaggedPValue input, String destComponentName, Stream.Grouping grouping) {
+        Stream.Producer producer = executionGraphContext.getProducer(input.getValue());
+        if (!producer.getComponentId().equals(destComponentName)) {
+            Stream.Consumer consumer = Stream.Consumer.of(destComponentName, grouping);
+            executionGraphContext.registerStreamConsumer(consumer, producer);
+
+            ExecutorsBolt executorsBolt = executionGraphContext.getBolt(producer.getComponentId());
+            if (executorsBolt != null) {
+                executorsBolt.addExternalOutputTag(input.getTag());
+            }
+        }
+    }
+
+    // Finds the component id of an upstream ExecutorsBolt that produces one of the current
+    // transform's inputs.
+    private String getUpstreamExecutorsBolt() {
+        for (PValue value : userGraphContext.getInputs().values()) {
+            String componentId = executionGraphContext.getProducerComponentId(value);
+            if (componentId != null && executionGraphContext.getBolt(componentId) != null) {
+                return componentId;
+            }
+        }
+        // When the upstream component is a spout, null will be returned.
+        return null;
+    }
+
+    /**
+     * check if the current transform is applied to source collection.
+     * @return true if any input of the current transform is produced by a spout
+     */
+    private boolean connectedToSource() {
+        for (PValue value : userGraphContext.getInputs().values()) {
+            if (executionGraphContext.producedBySpout(value)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * @param upstreamExecutorsBolt candidate upstream bolt the executor may be chained into
+     * @param inputs the inputs of the current transform
+     * @return true if there is multiple input streams, or upstream executor output the same stream
+     *          to different executors
+     */
+    private boolean isMultipleInputOrOutput(ExecutorsBolt upstreamExecutorsBolt, Map<TupleTag<?>, PValue> inputs) {
+        if (inputs.size() > 1) {
+            return true;
+        } else {
+            final Sets.SetView<TupleTag> intersection = Sets.intersection(upstreamExecutorsBolt.getExecutors().keySet(), inputs.keySet());
+            if (!intersection.isEmpty()) {
+                // there is already a different executor consuming the same input
+                return true;
+            } else {
+                return false;
+            }
+        }
+    }
+
+    public void addTransformExecutor(Executor executor) {
+        addTransformExecutor(executor, Collections.EMPTY_LIST);
+    }
+
+    public void addTransformExecutor(Executor executor, List<PValue> sideInputs) {
+        addTransformExecutor(executor, userGraphContext.getInputs(), userGraphContext.getOutputs(), sideInputs);
+    }
+
+    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs) {
+        addTransformExecutor(executor, inputs, outputs, Collections.EMPTY_LIST);
+    }
+
+    // Adds the given transform executor to the topology, either chained into an existing
+    // ExecutorsBolt or hosted by a newly created bolt, and wires the input/output/side-input
+    // streams accordingly.
+    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs, List<PValue> sideInputs) {
+        String name = null;
+
+        ExecutorsBolt bolt = null;
+
+        boolean isGBK = false;
+        /**
+         * Check if the transform executor needs to be chained into an existing ExecutorsBolt.
+         * For the following cases, a new bolt is created for the specified executor, otherwise
+         * the executor will be added into the bolt that contains the corresponding upstream executor.
+         * a) it is a GroupByKey executor
+         * b) it is connected to source directly
+         * c) no existing upstream bolt was found
+         * d) for the purpose of performance, to reduce the side effects between multiple streams
+         *    which are output to the same executor, a new bolt will be created.
+         */
+        if (RunnerUtils.isGroupByKeyExecutor(executor)) {
+            bolt = new ExecutorsBolt();
+            name = executionGraphContext.registerBolt(bolt);
+            isGBK = true;
+        } else if (connectedToSource()) {
+            bolt = new ExecutorsBolt();
+            name = executionGraphContext.registerBolt(bolt);
+        } else {
+            name = getUpstreamExecutorsBolt();
+            if (name == null) {
+                bolt = new ExecutorsBolt();
+                name = executionGraphContext.registerBolt(bolt);
+            } else {
+                bolt = executionGraphContext.getBolt(name);
+                if (isMultipleInputOrOutput(bolt, inputs)) {
+                    bolt = new ExecutorsBolt();
+                    name = executionGraphContext.registerBolt(bolt);
+                }
+            }
+        }
+
+        // update the output tags of current transform into ExecutorsBolt
+        for (Map.Entry<TupleTag<?>, PValue> entry : outputs.entrySet()) {
+            TupleTag tag = entry.getKey();
+            PValue value = entry.getValue();
+
+            // use tag of PValueBase
+            if (value instanceof PValueBase) {
+                tag = ((PValueBase) value).expand().keySet().iterator().next();
+            }
+            executionGraphContext.registerStreamProducer(
+                    TaggedPValue.of(tag, value),
+                    Stream.Producer.of(name, tag.getId(), value.getName()));
+            //bolt.addOutputTags(tag);
+        }
+
+        // add the transform executor into the chain of ExecutorsBolt
+        for (Map.Entry<TupleTag<?>, PValue> entry : inputs.entrySet()) {
+            TupleTag tag = entry.getKey();
+            PValue value = entry.getValue();
+            bolt.addExecutor(tag, executor);
+
+            // filter all connections inside bolt
+            //if (!bolt.getOutputTags().contains(tag)) {
+                Stream.Grouping grouping;
+                if (isGBK) {
+                    grouping = Stream.Grouping.byFields(Arrays.asList(CommonInstance.KEY));
+                } else {
+                    grouping = Stream.Grouping.of(Stream.Grouping.Type.LOCAL_OR_SHUFFLE);
+                }
+                addStormStreamDef(TaggedPValue.of(tag, value), name, grouping);
+            //}
+        }
+
+        // side inputs are broadcast (ALL grouping) to every task of the consuming bolt
+        for (PValue sideInput : sideInputs) {
+            TupleTag tag = userGraphContext.findTupleTag(sideInput);
+            bolt.addExecutor(tag, executor);
+            checkState(!bolt.getOutputTags().contains(tag));
+            addStormStreamDef(TaggedPValue.of(tag, sideInput), name, Stream.Grouping.of(Stream.Grouping.Type.ALL));
+        }
+
+        bolt.registerExecutor(executor);
+
+        // set parallelismNumber
+        String pTransformfullName = userGraphContext.currentTransform.getFullName();
+        String compositeName = pTransformfullName.split("/")[0];
+        Map parallelismNumMap = userGraphContext.getOptions().getParallelismNumMap();
+        if (parallelismNumMap.containsKey(compositeName)) {
+            int configNum = (Integer) parallelismNumMap.get(compositeName);
+            int currNum = bolt.getParallelismNum();
+            bolt.setParallelismNum(Math.max(configNum, currNum));
+        }
+    }
+
+    // TODO: add getSideInputs() and getSideOutputs().
+    /** View of the user-defined Beam graph around the transform currently being translated. */
+    public static class UserGraphContext {
+        private final StormPipelineOptions options;
+        private final Map<PValue, TupleTag> pValueToTupleTag;
+        private AppliedPTransform<?, ?, ?> currentTransform = null;
+
+        private boolean isWindowed = false;
+
+        public UserGraphContext(StormPipelineOptions options) {
+            this.options = checkNotNull(options, "options");
+            this.pValueToTupleTag = Maps.newHashMap();
+        }
+
+        public StormPipelineOptions getOptions() {
+            return this.options;
+        }
+
+        public void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
+            this.currentTransform = transform;
+        }
+
+        public String getStepName() {
+            return currentTransform.getFullName();
+        }
+
+        public <T extends PValue> T getInput() {
+            return (T) currentTransform.getInputs().values().iterator().next();
+        }
+
+        public Map<TupleTag<?>, PValue> getInputs() {
+            return currentTransform.getInputs();
+        }
+
+        public TupleTag<?> getInputTag() {
+            return currentTransform.getInputs().keySet().iterator().next();
+        }
+
+        public List<TupleTag<?>> getInputTags() {
+            return Lists.newArrayList(currentTransform.getInputs().keySet());
+        }
+
+        public <T extends PValue> T getOutput() {
+            return (T) currentTransform.getOutputs().values().iterator().next();
+        }
+
+        public Map<TupleTag<?>, PValue> getOutputs() {
+            return currentTransform.getOutputs();
+        }
+
+        public TupleTag<?> getOutputTag() {
+            return currentTransform.getOutputs().keySet().iterator().next();
+        }
+
+        public List<TupleTag<?>> getOutputTags() {
+            return Lists.newArrayList(currentTransform.getOutputs().keySet());
+        }
+
+        // Records the PValue -> TupleTag mapping of the current transform's outputs,
+        // used later by findTupleTag() to resolve side-input tags.
+        public void recordOutputTaggedPValue() {
+            for (Map.Entry<TupleTag<?>, PValue> entry : getOutputs().entrySet()) {
+                pValueToTupleTag.put(entry.getValue(), entry.getKey());
+            }
+        }
+
+        public <T> TupleTag<T> findTupleTag(PValue pValue) {
+            return pValueToTupleTag.get(checkNotNull(pValue, "pValue"));
+        }
+
+        public void setWindowed() {
+            this.isWindowed = true;
+        }
+
+        public boolean isWindowed() {
+            return this.isWindowed;
+        }
+
+        @Override
+        public String toString() {
+            return Joiner.on('\n').join(FluentIterable.from(pValueToTupleTag.entrySet())
+                    .transform(new Function<Map.Entry<PValue,TupleTag>, String>() {
+                        @Override
+                        public String apply(Map.Entry<PValue, TupleTag> entry) {
+                            return String.format("%s == %s", entry.getValue().getId(), entry.getKey().getName());
+                        }}));
+        }
+    }
+
+    /** The Storm topology (spouts, bolts and streams) being assembled during translation. */
+    public static class ExecutionGraphContext {
+
+        private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();
+        private final Map<String, ExecutorsBolt> boltMap = new HashMap<>();
+
+        // One-to-one mapping between Stream.Producer and TaggedPValue (or PValue).
+        private final Map<PValue, Stream.Producer> pValueToProducer = new HashMap<>();
+        private final Map<Stream.Producer, TaggedPValue> producerToTaggedPValue = new HashMap<>();
+
+        private final List<Stream> streams = new ArrayList<>();
+
+        // Monotonic counter used to generate unique spout/bolt component names.
+        private int id = 1;
+
+        public void registerSpout(AdaptorBasicSpout spout, TaggedPValue output) {
+            checkNotNull(spout, "spout");
+            checkNotNull(output, "output");
+            String name = "spout" + genId();
+            this.spoutMap.put(name, spout);
+            registerStreamProducer(
+                    output,
+                    Stream.Producer.of(name, output.getTag().getId(), output.getValue().getName()));
+        }
+
+        public AdaptorBasicSpout getSpout(String id) {
+            if (Strings.isNullOrEmpty(id)) {
+                return null;
+            }
+            return this.spoutMap.get(id);
+        }
+
+        public Map<String, AdaptorBasicSpout> getSpouts() {
+            return this.spoutMap;
+        }
+
+        public String registerBolt(ExecutorsBolt bolt) {
+            checkNotNull(bolt, "bolt");
+            String name = "bolt" + genId();
+            this.boltMap.put(name, bolt);
+            return name;
+        }
+
+        public ExecutorsBolt getBolt(String id) {
+            if (Strings.isNullOrEmpty(id)) {
+                return null;
+            }
+            return this.boltMap.get(id);
+        }
+
+        public void registerStreamProducer(TaggedPValue taggedPValue, Stream.Producer producer) {
+            checkNotNull(taggedPValue, "taggedPValue");
+            checkNotNull(producer, "producer");
+            pValueToProducer.put(taggedPValue.getValue(), producer);
+            producerToTaggedPValue.put(producer, taggedPValue);
+        }
+
+        public Stream.Producer getProducer(PValue pValue) {
+            return pValueToProducer.get(checkNotNull(pValue, "pValue"));
+        }
+
+        public String getProducerComponentId(PValue pValue) {
+            Stream.Producer producer = getProducer(pValue);
+            return producer == null ? null : producer.getComponentId();
+        }
+
+        public boolean producedBySpout(PValue pValue) {
+            String componentId = getProducerComponentId(pValue);
+            return getSpout(componentId) != null;
+        }
+
+        public void registerStreamConsumer(Stream.Consumer consumer, Stream.Producer producer) {
+            streams.add(Stream.of(
+                    checkNotNull(producer, "producer"),
+                    checkNotNull(consumer, "consumer")));
+        }
+
+        public Map<PValue, Stream.Producer> getPValueToProducers() {
+            return pValueToProducer;
+        }
+
+        public Iterable<Stream> getStreams() {
+            return streams;
+        }
+
+        @Override
+        public String toString() {
+            List<String> ret = new ArrayList<>();
+            ret.add("SPOUT");
+            for (Map.Entry<String, AdaptorBasicSpout> entry : spoutMap.entrySet()) {
+                ret.add(entry.getKey() + ": " + entry.getValue().toString());
+            }
+            ret.add("BOLT");
+            for (Map.Entry<String, ExecutorsBolt> entry : boltMap.entrySet()) {
+                ret.add(entry.getKey() + ": " + entry.getValue().toString());
+            }
+            ret.add("STREAM");
+            for (Stream stream : streams) {
+                ret.add(String.format(
+                        "%s@@%s ---> %s@@%s",
+                        stream.getProducer().getStreamId(),
+                        stream.getProducer().getComponentId(),
+                        stream.getConsumer().getGrouping(),
+                        stream.getConsumer().getComponentId()));
+            }
+            return Joiner.on("\n").join(ret);
+        }
+
+        private synchronized int genId() {
+            return id++;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
new file mode 100644
index 0000000..0f856cf
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.beam.runners.jstorm.translation.translator.BoundedSourceTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.FlattenTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.GroupByKeyTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.ParDoBoundMultiTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.ParDoBoundTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.UnboundedSourceTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.WindowAssignTranslator;
+import org.apache.beam.sdk.io.Read;
+import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Lookup table mapping PTransform types to associated TransformTranslator implementations.
+ */
+public class TranslatorRegistry {
+    private static final Logger LOG = LoggerFactory.getLogger(TranslatorRegistry.class);
+
+    private static final Map<Class<? extends PTransform>, TransformTranslator> TRANSLATORS = new HashMap<>();
+
+    static {
+        TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
+        TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
+        // TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator());
+        // TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
+
+        TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
+        TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
+
+        //TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator<>());
+        TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
+
+        TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());
+
+        TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator());
+
+        TRANSLATORS.put(ViewTranslator.CreateJStormPCollectionView.class, new ViewTranslator());
+
+        /*
+         * Currently, no translation is registered for Combine and Reshuffle, because those
+         * composite transforms are eventually expanded into GroupByKey and ParDo, so only the
+         * final primitive transforms need to be translated. If any improvement is required,
+         * the composite transforms may be translated directly in the future.
+         */
+        // TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator());
+        // TRANSLATORS.put(Globally.class, new CombineGloballyTranslator());
+        // TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslator());
+    }
+
+    /**
+     * Returns the translator registered for the concrete class of the given transform,
+     * or {@code null} (with a warning logged) when the transform is not supported.
+     */
+    public static TransformTranslator<?> getTranslator(PTransform<?, ?> transform) {
+        TransformTranslator<?> translator = TRANSLATORS.get(transform.getClass());
+        if (translator == null) {
+            LOG.warn("Unsupported operator={}", transform.getClass().getName());
+        }
+        return translator;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
new file mode 100644
index 0000000..b07b426
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+
+import backtype.storm.topology.IComponent;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+
+/**
+ * Base class for JStorm components that enables output stream definitions to be added through
+ * an API, rather than hard-coded. It also records, per stream id, whether the stream carries
+ * key/value pairs, and holds the component's configured parallelism.
+ */
+public abstract class AbstractComponent implements IComponent {
+    // Maps each declared stream id to the fields it emits.
+    private Map<String, Fields> streamToFields = new HashMap<>();
+    // Maps each declared stream id to whether it emits key/value tuples.
+    private Map<String, Boolean> keyStreams = new HashMap<>();
+    private int parallelismNum = 0;
+
+    /**
+     * Declares a value-only output stream ({@code CommonInstance.VALUE} field).
+     */
+    public void addOutputField(String streamId) {
+        addOutputField(streamId, new Fields(CommonInstance.VALUE));
+    }
+
+    /**
+     * Declares an output stream with the given fields; the stream is marked as non-keyed.
+     */
+    public void addOutputField(String streamId, Fields fields) {
+        streamToFields.put(streamId, fields);
+        keyStreams.put(streamId, false);
+    }
+
+    /**
+     * Declares a key/value output stream ({@code CommonInstance.KEY} and
+     * {@code CommonInstance.VALUE} fields); the stream is marked as keyed.
+     */
+    public void addKVOutputField(String streamId) {
+        streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
+        keyStreams.put(streamId, true);
+    }
+
+    /**
+     * Declares every stream registered via the {@code add*OutputField} methods to Storm.
+     */
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
+            declarer.declareStream(entry.getKey(), entry.getValue());
+        }
+    }
+
+    /**
+     * Returns whether the given stream emits key/value tuples; {@code false} for unknown streams.
+     */
+    public boolean keyedEmit(String streamId) {
+        Boolean isKeyedStream = keyStreams.get(streamId);
+        return isKeyedStream == null ? false : isKeyedStream;
+    }
+
+    public int getParallelismNum() {
+        return parallelismNum;
+    }
+
+    public void setParallelismNum(int num) {
+        parallelismNum = num;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
new file mode 100644
index 0000000..91881f2
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import backtype.storm.topology.IRichBatchBolt;
+
+/**
+ * Marker base class combining {@link AbstractComponent}'s stream-declaration API with the
+ * JStorm {@code IRichBatchBolt} contract; concrete bolt adaptors extend this class.
+ */
+public abstract class AdaptorBasicBolt extends AbstractComponent implements IRichBatchBolt {
+    
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
new file mode 100644
index 0000000..5a0c6ec
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import backtype.storm.topology.IRichSpout;
+
+/**
+ * Marker base class combining {@link AbstractComponent}'s stream-declaration API with the
+ * Storm {@code IRichSpout} contract; concrete spout adaptors extend this class.
+ */
+public abstract class AdaptorBasicSpout extends AbstractComponent implements IRichSpout {
+    
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
new file mode 100644
index 0000000..2bf3303
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
@@ -0,0 +1,328 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import java.io.Serializable;
+import java.util.*;
+
+import avro.shaded.com.google.common.collect.Iterables;
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.metric.MetricClient;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.runners.core.DoFnRunners;
+import org.apache.beam.runners.core.DoFnRunners.OutputManager;
+import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
+import org.apache.beam.runners.core.SideInputHandler;
+import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateTag;
+import org.apache.beam.runners.core.StateTags;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
+import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.runners.core.NullSideInputReader;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.util.DefaultStepContext;
+import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * {@link Executor} that runs a user {@link DoFn} inside a JStorm bolt. Main-input elements are
+ * fed to a {@link DoFnRunner}; when the transform has side inputs, elements whose side-input
+ * windows are not yet ready are "pushed back" into bag state and replayed as side-input data
+ * arrives (see {@link #processSideInput}) or when the final watermark is received (see
+ * {@link #processAllPushBackElements}).
+ */
+public class DoFnExecutor<InputT, OutputT> implements Executor {
+    private static final long serialVersionUID = 5297603063991078668L;
+
+    private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
+
+    /**
+     * {@link OutputManager} that forwards every produced element to the enclosing
+     * {@link ExecutorsBolt} for routing to downstream executors.
+     */
+    public class DoFnExecutorOutputManager implements OutputManager, Serializable {
+        private static final long serialVersionUID = -661113364735206170L;
+
+        @Override
+        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+            executorsBolt.processExecutorElem(tag, output);
+        }
+    }
+
+    // Plain runner used when there are no side inputs; pushbackRunner wraps it otherwise.
+    protected transient DoFnRunner<InputT, OutputT> runner = null;
+    protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
+
+    protected final String stepName;
+
+    // Distinguishes state of multiple DoFnExecutors that share one ExecutorsBolt.
+    protected int internalDoFnExecutorId;
+
+    protected final String description;
+
+    protected final TupleTag<OutputT> mainTupleTag;
+    protected final List<TupleTag<?>> sideOutputTags;
+
+    // Options are kept in serialized form; deserialized into pipelineOptions in init().
+    protected SerializedPipelineOptions serializedOptions;
+    protected transient StormPipelineOptions pipelineOptions;
+
+    protected DoFn<InputT, OutputT> doFn;
+    protected final Coder<WindowedValue<InputT>> inputCoder;
+    protected DoFnInvoker<InputT, OutputT> doFnInvoker;
+    protected OutputManager outputManager;
+    protected WindowingStrategy<?, ?> windowingStrategy;
+    protected final TupleTag<InputT> mainInputTag;
+    protected Collection<PCollectionView<?>> sideInputs;
+    protected SideInputHandler sideInputHandler;
+    protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
+
+    // Initialize during runtime
+    protected ExecutorContext executorContext;
+    protected ExecutorsBolt executorsBolt;
+    protected TimerInternals timerInternals;
+    protected transient StateInternals pushbackStateInternals;
+    protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
+    protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
+    protected transient IKvStoreManager kvStoreManager;
+    protected DefaultStepContext stepContext;
+    protected transient MetricClient metricClient;
+
+    /**
+     * Creates an executor for one ParDo step. Only serializable configuration is captured here;
+     * all runtime services (state, timers, metrics) are wired up later in {@link #init}.
+     */
+    public DoFnExecutor(
+            String stepName,
+            String description,
+            StormPipelineOptions pipelineOptions,
+            DoFn<InputT, OutputT> doFn,
+            Coder<WindowedValue<InputT>> inputCoder,
+            WindowingStrategy<?, ?> windowingStrategy,
+            TupleTag<InputT> mainInputTag,
+            Collection<PCollectionView<?>> sideInputs,
+            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+            TupleTag<OutputT> mainTupleTag,
+            List<TupleTag<?>> sideOutputTags) {
+        this.stepName = checkNotNull(stepName, "stepName");
+        this.description = checkNotNull(description, "description");
+        this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
+        this.doFn = doFn;
+        this.inputCoder = inputCoder;
+        this.outputManager = new DoFnExecutorOutputManager();
+        this.windowingStrategy = windowingStrategy;
+        this.mainInputTag = mainInputTag;
+        this.sideInputs = sideInputs;
+        this.mainTupleTag = mainTupleTag;
+        this.sideOutputTags = sideOutputTags;
+        this.sideInputTagToView = sideInputTagToView;
+    }
+
+    /**
+     * Builds the underlying {@link DoFnRunner}, decorated with metrics reporting. Falls back to
+     * a {@link NullSideInputReader} when no side-input handler has been created.
+     */
+    protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
+        return new DoFnRunnerWithMetrics<>(
+            stepName,
+            DoFnRunners.simpleRunner(
+                this.pipelineOptions,
+                this.doFn,
+                this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
+                this.outputManager,
+                this.mainTupleTag,
+                this.sideOutputTags,
+                this.stepContext,
+                this.windowingStrategy),
+            MetricsReporter.create(metricClient));
+    }
+
+    /**
+     * Wires up runtime services (timers, key/value state, step context, metrics) from the
+     * executor context. Called from {@link #init} before the runners are created.
+     */
+    protected void initService(ExecutorContext context) {
+        // TODO: what should be set for key in here?
+        timerInternals = new JStormTimerInternals(null /* key */, this, context.getExecutorsBolt().timerService());
+        kvStoreManager = context.getKvStoreManager();
+        stepContext = new DefaultStepContext(timerInternals,
+                new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+        metricClient = new MetricClient(executorContext.getTopologyContext());
+    }
+
+    /**
+     * Runtime initialization: deserializes pipeline options, sets up push-back state when side
+     * inputs are present, creates the runner(s), and invokes the user's {@code @Setup}.
+     */
+    @Override
+    public void init(ExecutorContext context) {
+        this.executorContext = context;
+        this.executorsBolt = context.getExecutorsBolt();
+        this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(StormPipelineOptions.class);
+
+        initService(context);
+
+        // Side inputs setup
+        if (sideInputs != null && sideInputs.isEmpty() == false) {
+            pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
+            watermarkHoldTag =
+                    StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
+            pushbackStateInternals = new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+            sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
+            runner = getDoFnRunner();
+            pushbackRunner = SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
+        } else {
+            runner = getDoFnRunner();
+        }
+
+        // Process user's setup
+        doFnInvoker = DoFnInvokers.invokerFor(doFn);
+        doFnInvoker.invokeSetup();
+    }
+
+    /**
+     * Dispatches an incoming element to either the main-input or side-input path based on its tag.
+     */
+    @Override
+    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+        // NOTE(review): this mixes String.format "%s" placeholders with an SLF4J-style "{}",
+        // so "elem={}" is emitted literally and elem.getValue() is never interpolated.
+        LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
+                tag, mainInputTag, sideInputs, elem.getValue()));
+        if (mainInputTag.equals(tag)) {
+            processMainInput(elem);
+        } else {
+            processSideInput(tag, elem);
+        }
+    }
+
+    /**
+     * Processes a main-input element. Without side inputs the element goes straight to the
+     * runner; otherwise elements not yet processable are stored in the pushed-back bag and the
+     * watermark hold is updated to the earliest pushed-back timestamp.
+     */
+    protected <T> void processMainInput(WindowedValue<T> elem) {
+       if (sideInputs.isEmpty()) {
+           runner.processElement((WindowedValue<InputT>) elem);
+       } else {
+           Iterable<WindowedValue<InputT>> justPushedBack =
+               pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
+           BagState<WindowedValue<InputT>> pushedBack =
+                   pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+
+           Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+           for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
+               // NOTE(review): the if-block and the earlier() call below compute the same
+               // minimum twice; one of the two is redundant.
+               if (pushedBackValue.getTimestamp().isBefore(min)) {
+                   min = pushedBackValue.getTimestamp();
+               }
+               min = earlier(min, pushedBackValue.getTimestamp());
+               pushedBack.add(pushedBackValue);
+           }
+           pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
+       }
+    }
+
+    /**
+     * Processes a side-input element: records it with the side-input handler, then retries all
+     * previously pushed-back main-input elements, re-storing any that are still not ready and
+     * resetting the watermark hold to the earliest remaining timestamp.
+     */
+    protected void processSideInput(TupleTag tag, WindowedValue elem) {
+        LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
+
+        PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
+        sideInputHandler.addSideInputValue(sideInputView, elem);
+
+        BagState<WindowedValue<InputT>> pushedBack =
+                pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+
+        List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
+
+        Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
+        if (pushedBackInputs != null) {
+            for (WindowedValue<InputT> input : pushedBackInputs) {
+
+                Iterable<WindowedValue<InputT>> justPushedBack =
+                        pushbackRunner.processElementInReadyWindows(input);
+                Iterables.addAll(newPushedBack, justPushedBack);
+            }
+        }
+        // Replace the bag's contents with only the elements that are still not processable.
+        pushedBack.clear();
+
+        Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
+        for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
+            min = earlier(min, pushedBackValue.getTimestamp());
+            pushedBack.add(pushedBackValue);
+        }
+
+        WatermarkHoldState watermarkHold =
+                pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+        // TODO: clear-then-add is not thread-safe.
+        watermarkHold.clear();
+        watermarkHold.add(min);
+    }
+
+    /**
+     * Process all pushed back elements when receiving watermark with max timestamp
+     */
+    public void processAllPushBackElements() {
+        if (sideInputs != null && sideInputs.isEmpty() == false) {
+            BagState<WindowedValue<InputT>> pushedBackElements =
+                    pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
+            if (pushedBackElements != null) {
+                for (WindowedValue<InputT> elem : pushedBackElements.read()) {
+                    LOG.info("Process pushback elem={}", elem);
+                    // Bypasses the pushback runner: at end of input the elements are processed
+                    // directly regardless of side-input readiness.
+                    runner.processElement(elem);
+                }
+                pushedBackElements.clear();
+            }
+
+            // Release the hold: nothing is pushed back any more.
+            WatermarkHoldState watermarkHold =
+                    pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
+            watermarkHold.clear();
+            watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
+        }
+    }
+
+    /**
+     * Fires a timer for the given key; the timer's namespace must be a window namespace.
+     */
+    public void onTimer(Object key, TimerInternals.TimerData timerData) {
+        StateNamespace namespace = timerData.getNamespace();
+        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+        BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
+        if (pushbackRunner != null) {
+            pushbackRunner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+        } else {
+            runner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
+        }
+    }
+
+    /**
+     * Invokes the user's {@code @Teardown}.
+     */
+    @Override
+    public void cleanup() {
+        doFnInvoker.invokeTeardown();
+    }
+
+    @Override
+    public String toString() {
+        return description;
+    }
+
+    // Returns the earlier of the two instants.
+    private Instant earlier(Instant left, Instant right) {
+        return left.isBefore(right) ? left : right;
+    }
+
+    /** Delegates bundle start to whichever runner is active. */
+    public void startBundle() {
+        if (pushbackRunner != null) {
+            pushbackRunner.startBundle();
+        } else {
+            runner.startBundle();
+        }
+    }
+
+    /** Delegates bundle finish to whichever runner is active. */
+    public void finishBundle() {
+        if (pushbackRunner != null) {
+            pushbackRunner.finishBundle();
+        } else {
+            runner.finishBundle();
+        }
+    }
+
+    public void setInternalDoFnExecutorId(int id) {
+        this.internalDoFnExecutorId = id;
+    }
+
+    public int getInternalDoFnExecutorId() {
+        return internalDoFnExecutorId;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
new file mode 100644
index 0000000..98dbcc5
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.Closeable;
+import java.io.IOException;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.sdk.metrics.MetricsContainer;
+import org.apache.beam.sdk.metrics.MetricsEnvironment;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.joda.time.Instant;
+
+/**
+ * DoFnRunner decorator which registers {@link MetricsContainer}.
+ */
+/**
+ * {@link DoFnRunner} decorator that scopes every delegated call inside the step's
+ * {@link MetricsContainer}, so metrics updated by the user's {@code DoFn} are attributed to
+ * this step. Accumulated metrics are flushed to the reporter at the end of each bundle.
+ */
+public class DoFnRunnerWithMetrics<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
+
+  private final String stepName;
+  private final DoFnRunner<InputT, OutputT> delegate;
+  private final MetricsReporter metricsReporter;
+
+  DoFnRunnerWithMetrics(
+      String stepName,
+      DoFnRunner<InputT, OutputT> delegate,
+      MetricsReporter metricsReporter) {
+    this.stepName = checkNotNull(stepName, "stepName");
+    this.delegate = checkNotNull(delegate, "delegate");
+    this.metricsReporter = checkNotNull(metricsReporter, "metricsReporter");
+  }
+
+  @Override
+  public void startBundle() {
+    // The IOException can only come from Closeable.close() on the scoped container.
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.startBundle();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void processElement(WindowedValue<InputT> elem) {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.processElement(elem);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void onTimer(String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.onTimer(timerId, window, timestamp, timeDomain);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void finishBundle() {
+    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
+        metricsReporter.getMetricsContainer(stepName))) {
+      delegate.finishBundle();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    // Flush metrics accumulated during the bundle once the delegate has finished.
+    metricsReporter.updateMetrics();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
new file mode 100644
index 0000000..d7214db
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import java.io.Serializable;
+
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * A runtime unit of work hosted inside an {@code ExecutorsBolt}: it is initialized once with
+ * its {@link ExecutorContext}, processes tagged windowed elements, and is cleaned up on
+ * shutdown. Implementations must be serializable so they can be shipped to workers.
+ */
+public interface Executor extends Serializable {
+    /**
+     * Initialization during runtime
+     */
+    void init(ExecutorContext context);
+
+    /** Processes one windowed element arriving on the stream identified by {@code tag}. */
+    <T> void  process(TupleTag<T> tag, WindowedValue<T> elem);
+
+    /** Releases resources when the hosting bolt shuts down. */
+    void cleanup();
+}
\ No newline at end of file


[29/53] [abbrv] beam git commit: jstorm-runner: update test runner of integration test in POM

Posted by pe...@apache.org.
jstorm-runner: update test runner of integration test in POM


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/df154de2
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/df154de2
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/df154de2

Branch: refs/heads/jstorm-runner
Commit: df154de20a216de4b997e0821dcf64cd553965ac
Parents: 9abbbd0
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Fri Jul 14 16:53:08 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/df154de2/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index 939f789..cdfaafb 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -73,7 +73,7 @@
                   <systemPropertyVariables>
                     <beamTestPipelineOptions>
                       [
-                        "--runner=com.alibaba.jstorm.beam.TestJStormRunner"
+                        "--runner=org.apache.beam.runners.jstorm.TestJStormRunner"
                       ]
                     </beamTestPipelineOptions>
                   </systemPropertyVariables>


[31/53] [abbrv] beam git commit: jstorm-runner: upgrade to Beam version 2.1.0-SNAPSHOT.

Posted by pe...@apache.org.
jstorm-runner: upgrade to Beam version 2.1.0-SNAPSHOT.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/a5af6d2f
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/a5af6d2f
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/a5af6d2f

Branch: refs/heads/jstorm-runner
Commit: a5af6d2f420adebaf18f0a9e7367d392327b60e2
Parents: 30f3eda
Author: Pei He <pe...@apache.org>
Authored: Tue Jul 18 14:29:19 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 runners/jstorm/pom.xml | 5 -----
 1 file changed, 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/a5af6d2f/runners/jstorm/pom.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/pom.xml b/runners/jstorm/pom.xml
index cdfaafb..9808cd2 100644
--- a/runners/jstorm/pom.xml
+++ b/runners/jstorm/pom.xml
@@ -32,7 +32,6 @@
 
   <properties>
     <jstorm.core.version>2.5.0-SNAPSHOT</jstorm.core.version>
-    <beam.version>2.0.0</beam.version>
   </properties>
 
   <profiles>
@@ -99,7 +98,6 @@
     <dependency>
       <groupId>org.apache.beam</groupId>
       <artifactId>beam-sdks-java-core</artifactId>
-      <version>${beam.version}</version>
       <exclusions>
         <!-- jstorm uses logback binding -->
         <exclusion>
@@ -112,7 +110,6 @@
     <dependency>
       <groupId>org.apache.beam</groupId>
       <artifactId>beam-runners-core-java</artifactId>
-      <version>${beam.version}</version>
       <exclusions>
         <!-- jstorm uses logback binding -->
         <exclusion>
@@ -125,7 +122,6 @@
     <dependency>
       <groupId>org.apache.beam</groupId>
       <artifactId>beam-runners-core-construction-java</artifactId>
-      <version>${beam.version}</version>
       <exclusions>
         <!-- jstorm uses logback binding -->
         <exclusion>
@@ -150,7 +146,6 @@
     <dependency>
       <groupId>org.apache.beam</groupId>
       <artifactId>beam-sdks-java-core</artifactId>
-      <version>${beam.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
       <exclusions>


[49/53] [abbrv] beam git commit: jstorm-runner: move jstorm ahead to get build result earlier.

Posted by pe...@apache.org.
jstorm-runner: move jstorm ahead to get build result earlier.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/26bcdf34
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/26bcdf34
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/26bcdf34

Branch: refs/heads/jstorm-runner
Commit: 26bcdf3492feae15cae61aa5325b07c44ce2a310
Parents: 00b9c5c
Author: Pei He <pe...@apache.org>
Authored: Fri Aug 18 17:26:53 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:01 2017 +0800

----------------------------------------------------------------------
 runners/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/26bcdf34/runners/pom.xml
----------------------------------------------------------------------
diff --git a/runners/pom.xml b/runners/pom.xml
index 0cdac02..36b8f22 100644
--- a/runners/pom.xml
+++ b/runners/pom.xml
@@ -36,11 +36,11 @@
     <module>core-construction-java</module>
     <module>core-java</module>
     <module>direct-java</module>
+    <module>jstorm</module>
     <module>flink</module>
     <module>google-cloud-dataflow-java</module>
     <module>spark</module>
     <module>apex</module>
-    <module>jstorm</module>
   </modules>
 
   <profiles>


[10/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
index 11c7c94..2a8160c 100644
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,18 +17,27 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime.state;
 
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
 import avro.shaded.com.google.common.collect.Maps;
-import org.apache.beam.runners.jstorm.translation.runtime.TimerServiceImpl;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
 import com.alibaba.jstorm.utils.KryoSerializer;
-
+import java.util.Iterator;
+import java.util.Map;
 import org.apache.beam.runners.core.StateNamespaces;
 import org.apache.beam.runners.core.StateTags;
+import org.apache.beam.runners.jstorm.translation.runtime.TimerServiceImpl;
 import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderRegistry;
-import org.apache.beam.sdk.state.*;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
@@ -40,180 +49,174 @@ import org.junit.rules.TemporaryFolder;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
-import java.util.Iterator;
-import java.util.Map;
-
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.hamcrest.Matchers.hasEntry;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
 /**
  * Tests for {@link JStormStateInternals}.
  */
 @RunWith(JUnit4.class)
 public class JStormStateInternalsTest {
 
-    @Rule
-    public final TemporaryFolder tmp = new TemporaryFolder();
-
-    private JStormStateInternals<String> jstormStateInternals;
-
-    @Before
-    public void setup() throws Exception {
-        IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
-                Maps.newHashMap(),
-                "test",
-                tmp.toString(),
-                new KryoSerializer(Maps.newHashMap()));
-        jstormStateInternals = new JStormStateInternals("key-1", kvStoreManager, new TimerServiceImpl(), 0);
-    }
-
-    @Test
-    public void testValueState() throws Exception {
-        ValueState<Integer> valueState = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-        valueState.write(Integer.MIN_VALUE);
-        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-        valueState.write(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-    }
-
-    @Test
-    public void testValueStateIdenticalId() throws Exception {
-        ValueState<Integer> valueState = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-        ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-
-        valueState.write(Integer.MIN_VALUE);
-        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-        assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
-        valueState.write(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-        assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
+  @Rule
+  public final TemporaryFolder tmp = new TemporaryFolder();
+
+  private JStormStateInternals<String> jstormStateInternals;
+
+  @Before
+  public void setup() throws Exception {
+    IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
+        Maps.newHashMap(),
+        "test",
+        tmp.toString(),
+        new KryoSerializer(Maps.newHashMap()));
+    jstormStateInternals = new JStormStateInternals(
+        "key-1", kvStoreManager, new TimerServiceImpl(), 0);
+  }
+
+  @Test
+  public void testValueState() throws Exception {
+    ValueState<Integer> valueState = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+    valueState.write(Integer.MIN_VALUE);
+    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+    valueState.write(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+  }
+
+  @Test
+  public void testValueStateIdenticalId() throws Exception {
+    ValueState<Integer> valueState = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+    ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+
+    valueState.write(Integer.MIN_VALUE);
+    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+    assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
+    valueState.write(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+    assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
+  }
+
+  @Test
+  public void testBagState() throws Exception {
+    BagState<Integer> bagStateA = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+    BagState<Integer> bagStateB = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
+
+    bagStateA.add(1);
+    bagStateA.add(0);
+    bagStateA.add(Integer.MAX_VALUE);
+
+    bagStateB.add(0);
+    bagStateB.add(Integer.MIN_VALUE);
+
+    Iterable<Integer> bagA = bagStateA.read();
+    Iterable<Integer> bagB = bagStateB.read();
+    assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
+    assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
+
+    bagStateA.clear();
+    bagStateA.add(1);
+    bagStateB.add(0);
+    assertThat(bagStateA.read(), containsInAnyOrder(1));
+    assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
+  }
+
+  @Test
+  public void testCombiningState() throws Exception {
+    Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
+    Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
+        CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
+
+    CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.combiningValue(
+            "state-id-a",
+            accumCoder,
+            combineFn));
+    assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
+    combiningState.add(10);
+    assertEquals(10, combiningState.read().longValue());
+    combiningState.add(1);
+    assertEquals(10, combiningState.read().longValue());
+    combiningState.add(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
+  }
+
+  @Test
+  public void testWatermarkHoldState() throws Exception {
+    WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.watermarkStateInternal(
+            "state-id-a",
+            TimestampCombiner.EARLIEST));
+    watermarkHoldState.add(new Instant(1));
+    assertEquals(1, watermarkHoldState.read().getMillis());
+    watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
+    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+    watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
+    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+  }
+
+  @Test
+  public void testMapState() throws Exception {
+    MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
+    mapStateA.put(1, 1);
+    mapStateA.put(2, 22);
+    mapStateA.put(1, 12);
+
+    Iterable<Integer> keys = mapStateA.keys().read();
+    Iterable<Integer> values = mapStateA.values().read();
+    assertThat(keys, containsInAnyOrder(1, 2));
+    assertThat(values, containsInAnyOrder(12, 22));
+
+    Iterable<Map.Entry<Integer, Integer>> entries = mapStateA.entries().read();
+    Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
+    Map.Entry<Integer, Integer> entry = itr.next();
+    assertEquals((long) entry.getKey(), 1l);
+    assertEquals((long) entry.getValue(), 12l);
+    entry = itr.next();
+    assertEquals((long) entry.getKey(), 2l);
+    assertEquals((long) entry.getValue(), 22l);
+    assertEquals(false, itr.hasNext());
+
+    mapStateA.remove(1);
+    keys = mapStateA.keys().read();
+    values = mapStateA.values().read();
+    assertThat(keys, containsInAnyOrder(2));
+    assertThat(values, containsInAnyOrder(22));
+
+    entries = mapStateA.entries().read();
+    itr = entries.iterator();
+    entry = itr.next();
+    assertEquals((long) entry.getKey(), 2l);
+    assertEquals((long) entry.getValue(), 22l);
+    assertEquals(false, itr.hasNext());
+  }
+
+  @Test
+  public void testMassiveDataOfBagState() {
+    BagState<Integer> bagStateA = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+
+    int count = 10000;
+    int n = 1;
+    while (n <= count) {
+      bagStateA.add(n);
+      n++;
     }
 
-    @Test
-    public void testBagState() throws Exception {
-        BagState<Integer> bagStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-        BagState<Integer> bagStateB = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
-
-        bagStateA.add(1);
-        bagStateA.add(0);
-        bagStateA.add(Integer.MAX_VALUE);
-
-        bagStateB.add(0);
-        bagStateB.add(Integer.MIN_VALUE);
-
-        Iterable<Integer> bagA = bagStateA.read();
-        Iterable<Integer> bagB = bagStateB.read();
-        assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
-        assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
-
-        bagStateA.clear();
-        bagStateA.add(1);
-        bagStateB.add(0);
-        assertThat(bagStateA.read(), containsInAnyOrder(1));
-        assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
+    int readCount = 0;
+    int readN = 0;
+    Iterator<Integer> itr = bagStateA.read().iterator();
+    while (itr.hasNext()) {
+      readN += itr.next();
+      readCount++;
     }
 
-    @Test
-    public void testCombiningState() throws Exception {
-        Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
-        Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
-            CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
-
-        CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
-                StateNamespaces.global(),
-                StateTags.combiningValue(
-                        "state-id-a",
-                        accumCoder,
-                        combineFn));
-        assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
-        combiningState.add(10);
-        assertEquals(10, combiningState.read().longValue());
-        combiningState.add(1);
-        assertEquals(10, combiningState.read().longValue());
-        combiningState.add(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
-    }
-
-    @Test
-    public void testWatermarkHoldState() throws Exception {
-        WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
-                StateNamespaces.global(),
-                StateTags.watermarkStateInternal(
-                        "state-id-a",
-                        TimestampCombiner.EARLIEST));
-        watermarkHoldState.add(new Instant(1));
-        assertEquals(1, watermarkHoldState.read().getMillis());
-        watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
-        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-        watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
-        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-    }
-
-    @Test
-    public void testMapState() throws Exception {
-        MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
-        mapStateA.put(1, 1);
-        mapStateA.put(2, 22);
-        mapStateA.put(1, 12);
-
-        Iterable<Integer> keys = mapStateA.keys().read();
-        Iterable<Integer> values = mapStateA.values().read();
-        assertThat(keys, containsInAnyOrder(1, 2));
-        assertThat(values, containsInAnyOrder(12, 22));
-
-        Iterable<Map.Entry<Integer, Integer>> entries =  mapStateA.entries().read();
-        Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
-        Map.Entry<Integer, Integer> entry = itr.next();
-        assertEquals((long) entry.getKey(), 1l);
-        assertEquals((long) entry.getValue(), 12l);
-        entry = itr.next();
-        assertEquals((long) entry.getKey(), 2l);
-        assertEquals((long) entry.getValue(), 22l);
-        assertEquals(false, itr.hasNext());
-
-        mapStateA.remove(1);
-        keys = mapStateA.keys().read();
-        values = mapStateA.values().read();
-        assertThat(keys, containsInAnyOrder(2));
-        assertThat(values, containsInAnyOrder(22));
-
-        entries =  mapStateA.entries().read();
-        itr = entries.iterator();
-        entry = itr.next();
-        assertEquals((long) entry.getKey(), 2l);
-        assertEquals((long) entry.getValue(), 22l);
-        assertEquals(false, itr.hasNext());
-    }
-
-    @Test
-    public void testMassiveDataOfBagState() {
-        BagState<Integer> bagStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-
-        int count = 10000;
-        int n = 1;
-        while(n <= count) {
-            bagStateA.add(n);
-            n++;
-        }
-
-        int readCount = 0;
-        int readN = 0;
-        Iterator<Integer> itr = bagStateA.read().iterator();
-        while(itr.hasNext()) {
-            readN += itr.next();
-            readCount++;
-        }
-
-        assertEquals((long) readN, ((1 + count) * count) / 2);
-        assertEquals((long) readCount, count);
-    }
+    assertEquals((long) readN, ((1 + count) * count) / 2);
+    assertEquals((long) readCount, count);
+  }
 }


[38/53] [abbrv] beam git commit: jstorm-runner: minor update for exception handling of TestJStormRunner

Posted by pe...@apache.org.
jstorm-runner: minor update for exception handling of TestJStormRunner


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/ad046483
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/ad046483
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/ad046483

Branch: refs/heads/jstorm-runner
Commit: ad046483e6d2341a4a2156e0db15f213c7f7feea
Parents: 1819833
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Jul 19 10:57:40 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/ad046483/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index 0088cf9..3124da2 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -67,7 +67,7 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
           throw new AssertionError("Failed assertion checks.");
         } else if (taskExceptionRec != null) {
           LOG.info("Exception was found.", taskExceptionRec);
-          throw new AssertionError(taskExceptionRec.getCause());
+          throw new RuntimeException(taskExceptionRec.getCause());
         } else {
           JStormUtils.sleepMs(500);
           waitTime += 500;


[26/53] [abbrv] beam git commit: jstorm-runner: remove code that was commented out.

Posted by pe...@apache.org.
jstorm-runner: remove code that was commented out.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/8cdd41b1
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/8cdd41b1
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/8cdd41b1

Branch: refs/heads/jstorm-runner
Commit: 8cdd41b1d4c7cd5aaf96f3f9c6c2fd203c047e02
Parents: 74ceac6
Author: Pei He <pe...@apache.org>
Authored: Fri Jul 14 15:51:22 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/translation/ExecutorsBolt.java      | 9 ++++-----
 .../runners/jstorm/translation/JStormStateInternals.java    | 1 -
 .../runners/jstorm/translation/TransformTranslator.java     | 2 --
 3 files changed, 4 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/8cdd41b1/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
index ef12db8..ce6ea2c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
@@ -319,12 +319,11 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
 
   @Override
   public String toString() {
-    // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
     List<String> ret = new ArrayList<>();
-        /*ret.add("inputTags");
-        for (TupleTag inputTag : inputTagToExecutor.keySet()) {
-            ret.add(inputTag.getId());
-        }*/
+    ret.add("inputTags");
+    for (TupleTag inputTag : inputTagToExecutor.keySet()) {
+      ret.add(inputTag.getId());
+    }
     ret.add("internalExecutors");
     for (Executor executor : inputTagToExecutor.values()) {
       ret.add(executor.toString());

http://git-wip-us.apache.org/repos/asf/beam/blob/8cdd41b1/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index fce870f..78882f2 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -73,7 +73,6 @@ class JStormStateInternals<K> implements StateInternals {
   @Override
   public <T extends State> T state(
       StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
-    // throw new UnsupportedOperationException("StateContext is not supported.");
     /**
      * TODO:
      * Same implementation as state() which is without StateContext. This might be updated after

http://git-wip-us.apache.org/repos/asf/beam/blob/8cdd41b1/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
index edd3d8a..4d431d3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TransformTranslator.java
@@ -62,7 +62,6 @@ interface TransformTranslator<T extends PTransform<?, ?>> {
                 @Override
                 public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
                   return taggedPValue.getKey().getId();
-                  // return taggedPValue.getValue().getName();
                 }
               })),
           transform.getName(),
@@ -71,7 +70,6 @@ interface TransformTranslator<T extends PTransform<?, ?>> {
                 @Override
                 public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
                   return taggedPvalue.getKey().getId();
-                  //return taggedPValue.getValue().getName();
                 }
               })));
     }


[02/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
new file mode 100644
index 0000000..481b7fb
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.util;
+
+import org.apache.beam.runners.core.ExecutionContext;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+import java.io.IOException;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Default StepContext for running DoFn This does not allow accessing state or timer internals.
+ */
+public class DefaultStepContext implements ExecutionContext.StepContext {
+
+    private TimerInternals timerInternals;
+
+    private StateInternals stateInternals;
+
+    public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
+        this.timerInternals = checkNotNull(timerInternals, "timerInternals");
+        this.stateInternals = checkNotNull(stateInternals, "stateInternals");
+    }
+
+    @Override
+    public String getStepName() {
+        return null;
+    }
+
+    @Override
+    public String getTransformName() {
+        return null;
+    }
+
+    @Override
+    public void noteOutput(WindowedValue<?> windowedValue) {
+
+    }
+
+    @Override
+    public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
+
+    }
+
+    @Override
+    public <T, W extends BoundedWindow> void writePCollectionViewData(TupleTag<?> tag, Iterable<WindowedValue<T>> data,
+            Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder) throws IOException {
+        throw new UnsupportedOperationException("Writing side-input data is not supported.");
+    }
+
+    @Override
+    public StateInternals stateInternals() {
+        return stateInternals;
+    }
+
+    @Override
+    public TimerInternals timerInternals() {
+        return timerInternals;
+    }
+
+    public void setStateInternals(StateInternals stateInternals) {
+        this.stateInternals = stateInternals;
+    }
+
+    public void setTimerInternals(TimerInternals timerInternals) {
+        this.timerInternals = timerInternals;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
new file mode 100644
index 0000000..cbf815a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.util;
+
+import org.apache.beam.runners.jstorm.translation.runtime.Executor;
+
+import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
+import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+
+public class RunnerUtils {
+    /**
+     * Convert WindowedValue<KV<>> into KeyedWorkItem<K, WindowedValue<V>>
+     * @param elem
+     * @return
+     */
+    public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
+        WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
+        SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
+                kvElem.getValue().getKey(),
+                kvElem.withValue(kvElem.getValue().getValue()));
+        return workItem;
+    }
+
+    public static boolean isGroupByKeyExecutor (Executor executor) {
+        if (executor instanceof GroupByWindowExecutor) {
+            return true;
+        } else if (executor instanceof StatefulDoFnExecutor ||
+                executor instanceof MultiStatefulDoFnExecutor) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
new file mode 100644
index 0000000..391699b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.runners.jstorm.util;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import org.apache.beam.sdk.options.PipelineOptions;
+
+/**
+ * Encapsulates the {@link PipelineOptions} in serialized (JSON) form so that they can be shipped
+ * to the cluster with the topology, and lazily deserialized again on the worker.
+ */
+public class SerializedPipelineOptions implements Serializable {
+
+  /** JSON-serialized form of the options; this is the only state that travels over the wire. */
+  private final byte[] serializedOptions;
+
+  /** Lazily initialized copy of deserialized options; transient, rebuilt on each worker. */
+  private transient PipelineOptions pipelineOptions;
+
+  /**
+   * Serializes the given options eagerly so that serialization failures surface at
+   * topology-construction time rather than on the cluster.
+   *
+   * @param options the options to capture; must not be null
+   * @throws RuntimeException if the options cannot be serialized to JSON
+   */
+  public SerializedPipelineOptions(PipelineOptions options) {
+    checkNotNull(options, "PipelineOptions must not be null.");
+
+    // Catch the narrowest type: ObjectMapper.writeValue declares IOException (the original
+    // caught Exception, which also swallowed programming errors).
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      new ObjectMapper().writeValue(baos, options);
+      this.serializedOptions = baos.toByteArray();
+    } catch (IOException e) {
+      throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
+    }
+  }
+
+  /**
+   * Returns the deserialized options, deserializing on first access and caching the result.
+   *
+   * <p>NOTE(review): the lazy initialization is not synchronized; concurrent first calls may
+   * each deserialize once, which is wasteful but harmless since the result is equivalent.
+   */
+  public PipelineOptions getPipelineOptions() {
+    if (pipelineOptions == null) {
+      try {
+        pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
+      } catch (IOException e) {
+        throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
+      }
+    }
+    return pipelineOptions;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
new file mode 100644
index 0000000..dee5f1a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.util;
+
+import java.util.Collections;
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.util.WindowedValue;
+
+/**
+ * A {@link KeyedWorkItem} that carries exactly one windowed value and no timers.
+ *
+ * @param <K> the type of the key
+ * @param <ElemT> the type of the element
+ */
+public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
+
+  final K key;
+  final WindowedValue<ElemT> value;
+
+  private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
+    this.key = key;
+    this.value = value;
+  }
+
+  /**
+   * Creates a work item for the given key holding the single given value.
+   */
+  public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
+    return new SingletonKeyedWorkItem<>(key, value);
+  }
+
+  @Override
+  public K key() {
+    return key;
+  }
+
+  /** Returns the single windowed value carried by this work item. */
+  public WindowedValue<ElemT> value() {
+    return value;
+  }
+
+  @Override
+  public Iterable<TimerInternals.TimerData> timersIterable() {
+    // Type-safe empty list; the raw Collections.EMPTY_LIST produced an unchecked warning.
+    return Collections.emptyList();
+  }
+
+  @Override
+  public Iterable<WindowedValue<ElemT>> elementsIterable() {
+    return Collections.singletonList(value);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternalsTest.java
deleted file mode 100644
index 0ecffff..0000000
--- a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/runtime/state/JStormStateInternalsTest.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime.state;
-
-import avro.shaded.com.google.common.collect.Maps;
-import com.alibaba.jstorm.beam.translation.runtime.TimerServiceImpl;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
-import com.alibaba.jstorm.utils.KryoSerializer;
-
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.runners.core.StateTags;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderRegistry;
-import org.apache.beam.sdk.state.*;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.Max;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.util.Iterator;
-import java.util.Map;
-
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.hamcrest.Matchers.hasEntry;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-/**
- * Tests for {@link JStormStateInternals}.
- */
-@RunWith(JUnit4.class)
-public class JStormStateInternalsTest {
-
-    @Rule
-    public final TemporaryFolder tmp = new TemporaryFolder();
-
-    private JStormStateInternals<String> jstormStateInternals;
-
-    @Before
-    public void setup() throws Exception {
-        IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
-                Maps.newHashMap(),
-                "test",
-                tmp.toString(),
-                new KryoSerializer(Maps.newHashMap()));
-        jstormStateInternals = new JStormStateInternals("key-1", kvStoreManager, new TimerServiceImpl(), 0);
-    }
-
-    @Test
-    public void testValueState() throws Exception {
-        ValueState<Integer> valueState = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-        valueState.write(Integer.MIN_VALUE);
-        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-        valueState.write(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-    }
-
-    @Test
-    public void testValueStateIdenticalId() throws Exception {
-        ValueState<Integer> valueState = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-        ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-
-        valueState.write(Integer.MIN_VALUE);
-        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-        assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
-        valueState.write(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-        assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
-    }
-
-    @Test
-    public void testBagState() throws Exception {
-        BagState<Integer> bagStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-        BagState<Integer> bagStateB = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
-
-        bagStateA.add(1);
-        bagStateA.add(0);
-        bagStateA.add(Integer.MAX_VALUE);
-
-        bagStateB.add(0);
-        bagStateB.add(Integer.MIN_VALUE);
-
-        Iterable<Integer> bagA = bagStateA.read();
-        Iterable<Integer> bagB = bagStateB.read();
-        assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
-        assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
-
-        bagStateA.clear();
-        bagStateA.add(1);
-        bagStateB.add(0);
-        assertThat(bagStateA.read(), containsInAnyOrder(1));
-        assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
-    }
-
-    @Test
-    public void testCombiningState() throws Exception {
-        Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
-        Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
-            CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
-
-        CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
-                StateNamespaces.global(),
-                StateTags.combiningValue(
-                        "state-id-a",
-                        accumCoder,
-                        combineFn));
-        assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
-        combiningState.add(10);
-        assertEquals(10, combiningState.read().longValue());
-        combiningState.add(1);
-        assertEquals(10, combiningState.read().longValue());
-        combiningState.add(Integer.MAX_VALUE);
-        assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
-    }
-
-    @Test
-    public void testWatermarkHoldState() throws Exception {
-        WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
-                StateNamespaces.global(),
-                StateTags.watermarkStateInternal(
-                        "state-id-a",
-                        TimestampCombiner.EARLIEST));
-        watermarkHoldState.add(new Instant(1));
-        assertEquals(1, watermarkHoldState.read().getMillis());
-        watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
-        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-        watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
-        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-    }
-
-    @Test
-    public void testMapState() throws Exception {
-        MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
-        mapStateA.put(1, 1);
-        mapStateA.put(2, 22);
-        mapStateA.put(1, 12);
-
-        Iterable<Integer> keys = mapStateA.keys().read();
-        Iterable<Integer> values = mapStateA.values().read();
-        assertThat(keys, containsInAnyOrder(1, 2));
-        assertThat(values, containsInAnyOrder(12, 22));
-
-        Iterable<Map.Entry<Integer, Integer>> entries =  mapStateA.entries().read();
-        Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
-        Map.Entry<Integer, Integer> entry = itr.next();
-        assertEquals((long) entry.getKey(), 1l);
-        assertEquals((long) entry.getValue(), 12l);
-        entry = itr.next();
-        assertEquals((long) entry.getKey(), 2l);
-        assertEquals((long) entry.getValue(), 22l);
-        assertEquals(false, itr.hasNext());
-
-        mapStateA.remove(1);
-        keys = mapStateA.keys().read();
-        values = mapStateA.values().read();
-        assertThat(keys, containsInAnyOrder(2));
-        assertThat(values, containsInAnyOrder(22));
-
-        entries =  mapStateA.entries().read();
-        itr = entries.iterator();
-        entry = itr.next();
-        assertEquals((long) entry.getKey(), 2l);
-        assertEquals((long) entry.getValue(), 22l);
-        assertEquals(false, itr.hasNext());
-    }
-
-    @Test
-    public void testMassiveDataOfBagState() {
-        BagState<Integer> bagStateA = jstormStateInternals.state(
-                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-
-        int count = 10000;
-        int n = 1;
-        while(n <= count) {
-            bagStateA.add(n);
-            n++;
-        }
-
-        int readCount = 0;
-        int readN = 0;
-        Iterator<Integer> itr = bagStateA.read().iterator();
-        while(itr.hasNext()) {
-            readN += itr.next();
-            readCount++;
-        }
-
-        assertEquals((long) readN, ((1 + count) * count) / 2);
-        assertEquals((long) readCount, count);
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/CoGroupByKeyTest.java b/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/CoGroupByKeyTest.java
deleted file mode 100644
index 4f69c93..0000000
--- a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/CoGroupByKeyTest.java
+++ /dev/null
@@ -1,302 +0,0 @@
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.TestJStormRunner;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.ValidatesRunner;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.apache.beam.sdk.transforms.join.CoGbkResult;
-import org.apache.beam.sdk.transforms.join.CoGroupByKey;
-import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
-import org.apache.beam.sdk.transforms.windowing.FixedWindows;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Duration;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-@RunWith(JUnit4.class)
-public class CoGroupByKeyTest implements Serializable {
-    /**
-     * Converts the given list into a PCollection belonging to the provided
-     * Pipeline in such a way that coder inference needs to be performed.
-     */
-    private PCollection<KV<Integer, String>> createInput(String name,
-                                                         Pipeline p, List<KV<Integer, String>> list) {
-        return createInput(name, p, list,  new ArrayList<Long>());
-    }
-
-    /**
-     * Converts the given list with timestamps into a PCollection.
-     */
-    private PCollection<KV<Integer, String>> createInput(String name,
-                                                         Pipeline p, List<KV<Integer, String>> list, List<Long> timestamps) {
-        PCollection<KV<Integer, String>> input;
-        if (timestamps.isEmpty()) {
-            input = p.apply("Create" + name, Create.of(list)
-                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
-        } else {
-            input = p.apply("Create" + name, Create.timestamped(list, timestamps)
-                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
-        }
-        return input.apply(
-                "Identity" + name,
-                ParDo.of(
-                        new DoFn<KV<Integer, String>, KV<Integer, String>>() {
-                            @ProcessElement
-                            public void processElement(ProcessContext c) {
-                                c.output(c.element());
-                            }
-                        }));
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the result
-     * of a {@link CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
-     * where each {@link PCollection} has no duplicate keys and the key sets of
-     * each {@link PCollection} are intersecting but neither is a subset of the other.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildGetOnlyGbk(
-            Pipeline p,
-            TupleTag<String> tag1,
-            TupleTag<String> tag2) {
-        List<KV<Integer, String>> list1 =
-                Arrays.asList(
-                        KV.of(1, "collection1-1"),
-                        KV.of(2, "collection1-2"));
-        List<KV<Integer, String>> list2 =
-                Arrays.asList(
-                        KV.of(2, "collection2-2"),
-                        KV.of(3, "collection2-3"));
-        PCollection<KV<Integer, String>> collection1 = createInput("CreateList1", p, list1);
-        PCollection<KV<Integer, String>> collection2 = createInput("CreateList2", p, list2);
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(tag1, collection1)
-                        .and(tag2, collection2)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    @Test
-    @Category(ValidatesRunner.class)
-    public void testCoGroupByKeyGetOnly() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        final TupleTag<String> tag1 = new TupleTag<>();
-        final TupleTag<String> tag2 = new TupleTag<>();
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                buildGetOnlyGbk(p, tag1, tag2);
-
-        PAssert.thatMap(coGbkResults).satisfies(
-                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
-                    @Override
-                    public Void apply(Map<Integer, CoGbkResult> results) {
-                        assertEquals("collection1-1", results.get(1).getOnly(tag1));
-                        assertEquals("collection1-2", results.get(2).getOnly(tag1));
-                        assertEquals("collection2-2", results.get(2).getOnly(tag2));
-                        assertEquals("collection2-3", results.get(3).getOnly(tag2));
-                        return null;
-                    }
-                });
-
-        p.run();
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
-     * results of the {@code CoGroupByKey} over three
-     * {@code PCollection<KV<Integer, String>>}, each of which correlates
-     * a customer id to purchases, addresses, or names, respectively.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbk(
-            Pipeline p,
-            TupleTag<String> purchasesTag,
-            TupleTag<String> addressesTag,
-            TupleTag<String> namesTag) {
-        List<KV<Integer, String>> idToPurchases =
-                Arrays.asList(
-                        KV.of(2, "Boat"),
-                        KV.of(1, "Shoes"),
-                        KV.of(3, "Car"),
-                        KV.of(1, "Book"),
-                        KV.of(10, "Pens"),
-                        KV.of(8, "House"),
-                        KV.of(4, "Suit"),
-                        KV.of(11, "House"),
-                        KV.of(14, "Shoes"),
-                        KV.of(2, "Suit"),
-                        KV.of(8, "Suit Case"),
-                        KV.of(3, "House"));
-
-        List<KV<Integer, String>> idToAddress =
-                Arrays.asList(
-                        KV.of(2, "53 S. 3rd"),
-                        KV.of(10, "383 Jackson Street"),
-                        KV.of(20, "3 W. Arizona"),
-                        KV.of(3, "29 School Rd"),
-                        KV.of(8, "6 Watling Rd"));
-
-        List<KV<Integer, String>> idToName =
-                Arrays.asList(
-                        KV.of(1, "John Smith"),
-                        KV.of(2, "Sally James"),
-                        KV.of(8, "Jeffery Spalding"),
-                        KV.of(20, "Joan Lichtfield"));
-
-        PCollection<KV<Integer, String>> purchasesTable =
-                createInput("CreateIdToPurchases", p, idToPurchases);
-
-        PCollection<KV<Integer, String>> addressTable =
-                createInput("CreateIdToAddress", p, idToAddress);
-
-        PCollection<KV<Integer, String>> nameTable =
-                createInput("CreateIdToName", p, idToName);
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(namesTag, nameTable)
-                        .and(addressesTag, addressTable)
-                        .and(purchasesTag, purchasesTable)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
-     * results of the {@code CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
-     * each of which correlates a customer id to clicks, purchases, respectively.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbkWithWindowing(
-            Pipeline p,
-            TupleTag<String> clicksTag,
-            TupleTag<String> purchasesTag) {
-        List<KV<Integer, String>> idToClick =
-                Arrays.asList(
-                        KV.of(1, "Click t0"),
-                        KV.of(2, "Click t2"),
-                        KV.of(1, "Click t4"),
-                        KV.of(1, "Click t6"),
-                        KV.of(2, "Click t8"));
-
-        List<KV<Integer, String>> idToPurchases =
-                Arrays.asList(
-                        KV.of(1, "Boat t1"),
-                        KV.of(1, "Shoesi t2"),
-                        KV.of(1, "Pens t3"),
-                        KV.of(2, "House t4"),
-                        KV.of(2, "Suit t5"),
-                        KV.of(1, "Car t6"),
-                        KV.of(1, "Book t7"),
-                        KV.of(2, "House t8"),
-                        KV.of(2, "Shoes t9"),
-                        KV.of(2, "House t10"));
-
-        PCollection<KV<Integer, String>> clicksTable =
-                createInput("CreateClicks",
-                        p,
-                        idToClick,
-                        Arrays.asList(0L, 2L, 4L, 6L, 8L))
-                        .apply("WindowClicks", Window.<KV<Integer, String>>into(
-                                FixedWindows.of(new Duration(4)))
-                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
-
-        PCollection<KV<Integer, String>> purchasesTable =
-                createInput("CreatePurchases",
-                        p,
-                        idToPurchases,
-                        Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L))
-                        .apply("WindowPurchases", Window.<KV<Integer, String>>into(
-                                FixedWindows.of(new Duration(4)))
-                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(clicksTag, clicksTable)
-                        .and(purchasesTag, purchasesTable)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    @Test
-    @Category(ValidatesRunner.class)
-    public void testCoGroupByKey() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        final TupleTag<String> namesTag = new TupleTag<>();
-        final TupleTag<String> addressesTag = new TupleTag<>();
-        final TupleTag<String> purchasesTag = new TupleTag<>();
-
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                buildPurchasesCoGbk(p, purchasesTag, addressesTag, namesTag);
-
-        PAssert.thatMap(coGbkResults).satisfies(
-                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
-                    @Override
-                    public Void apply(Map<Integer, CoGbkResult> results) {
-                        CoGbkResult result1 = results.get(1);
-                        assertEquals("John Smith", result1.getOnly(namesTag));
-                        assertThat(result1.getAll(purchasesTag), containsInAnyOrder("Shoes", "Book"));
-
-                        CoGbkResult result2 = results.get(2);
-                        assertEquals("Sally James", result2.getOnly(namesTag));
-                        assertEquals("53 S. 3rd", result2.getOnly(addressesTag));
-                        assertThat(result2.getAll(purchasesTag), containsInAnyOrder("Suit", "Boat"));
-
-                        CoGbkResult result3 = results.get(3);
-                        assertEquals("29 School Rd", result3.getOnly(addressesTag), "29 School Rd");
-                        assertThat(result3.getAll(purchasesTag), containsInAnyOrder("Car", "House"));
-
-                        CoGbkResult result8 = results.get(8);
-                        assertEquals("Jeffery Spalding", result8.getOnly(namesTag));
-                        assertEquals("6 Watling Rd", result8.getOnly(addressesTag));
-                        assertThat(result8.getAll(purchasesTag), containsInAnyOrder("House", "Suit Case"));
-
-                        CoGbkResult result20 = results.get(20);
-                        assertEquals("Joan Lichtfield", result20.getOnly(namesTag));
-                        assertEquals("3 W. Arizona", result20.getOnly(addressesTag));
-
-                        assertEquals("383 Jackson Street", results.get(10).getOnly(addressesTag));
-
-                        assertThat(results.get(4).getAll(purchasesTag), containsInAnyOrder("Suit"));
-                        assertThat(results.get(10).getAll(purchasesTag), containsInAnyOrder("Pens"));
-                        assertThat(results.get(11).getAll(purchasesTag), containsInAnyOrder("House"));
-                        assertThat(results.get(14).getAll(purchasesTag), containsInAnyOrder("Shoes"));
-
-                        return null;
-                    }
-                });
-
-        p.run();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTest.java b/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTest.java
deleted file mode 100644
index 5ec6636..0000000
--- a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/GroupByKeyTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-
-import com.alibaba.jstorm.beam.TestJStormRunner;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.ValidatesRunner;
-import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.hamcrest.Description;
-import org.hamcrest.Matcher;
-import org.hamcrest.TypeSafeMatcher;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.util.Arrays;
-import java.util.List;
-
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/**
- * Tests for {@link GroupByKey} with {@link com.alibaba.jstorm.beam.StormRunner}.
- */
-@RunWith(JUnit4.class)
-public class GroupByKeyTest {
-
-    static final String[] WORDS_ARRAY = new String[] {
-            "hi", "there", "hi", "hi", "sue", "bob",
-            "hi", "sue", "", "", "ZOW", "bob", "" };
-
-    static final List<String> WORDS = Arrays.asList(WORDS_ARRAY);
-
-    @Test
-    public void testGroupByKey() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        List<KV<String, Integer>> ungroupedPairs = Arrays.asList(
-                KV.of("k1", 3),
-                KV.of("k5", Integer.MAX_VALUE),
-                KV.of("k5", Integer.MIN_VALUE),
-                KV.of("k2", 66),
-                KV.of("k1", 4),
-                KV.of("k2", -33),
-                KV.of("k3", 0));
-
-        PCollection<KV<String, Integer>> input =
-                p.apply(Create.of(ungroupedPairs)
-                        .withCoder(KvCoder.of(StringUtf8Coder.of(), BigEndianIntegerCoder.of())));
-
-        PCollection<KV<String, Iterable<Integer>>> output =
-                input.apply(GroupByKey.<String, Integer>create());
-
-        PAssert.that(output)
-                .satisfies(new AssertThatHasExpectedContentsForTestGroupByKey());
-
-        p.run();
-    }
-
-    @Test
-    public void testCountGloballyBasic() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-        PCollection<String> input = p.apply(Create.of(WORDS));
-
-        PCollection<Long> output =
-                input.apply(Count.<String>globally());
-
-        PAssert.that(output)
-                .containsInAnyOrder(13L);
-        p.run();
-    }
-
-    static class AssertThatHasExpectedContentsForTestGroupByKey
-            implements SerializableFunction<Iterable<KV<String, Iterable<Integer>>>,
-            Void> {
-        @Override
-        public Void apply(Iterable<KV<String, Iterable<Integer>>> actual) {
-            assertThat(actual, containsInAnyOrder(
-                    KvMatcher.isKv(is("k1"), containsInAnyOrder(3, 4)),
-                    KvMatcher.isKv(is("k5"), containsInAnyOrder(Integer.MAX_VALUE,
-                            Integer.MIN_VALUE)),
-                    KvMatcher.isKv(is("k2"), containsInAnyOrder(66, -33)),
-                    KvMatcher.isKv(is("k3"), containsInAnyOrder(0))));
-            return null;
-        }
-    }
-
-    /**
-     * Matcher for KVs.
-     */
-    public static class KvMatcher<K, V>
-            extends TypeSafeMatcher<KV<? extends K, ? extends V>> {
-        final Matcher<? super K> keyMatcher;
-        final Matcher<? super V> valueMatcher;
-
-        public static <K, V> KvMatcher<K, V> isKv(Matcher<K> keyMatcher,
-                                                  Matcher<V> valueMatcher) {
-            return new KvMatcher<>(keyMatcher, valueMatcher);
-        }
-
-        public KvMatcher(Matcher<? super K> keyMatcher,
-                         Matcher<? super V> valueMatcher) {
-            this.keyMatcher = keyMatcher;
-            this.valueMatcher = valueMatcher;
-        }
-
-        @Override
-        public boolean matchesSafely(KV<? extends K, ? extends V> kv) {
-            return keyMatcher.matches(kv.getKey())
-                    && valueMatcher.matches(kv.getValue());
-        }
-
-        @Override
-        public void describeTo(Description description) {
-            description
-                    .appendText("a KV(").appendValue(keyMatcher)
-                    .appendText(", ").appendValue(valueMatcher)
-                    .appendText(")");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/ParDoTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/ParDoTest.java b/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/ParDoTest.java
deleted file mode 100644
index da0aafe..0000000
--- a/runners/jstorm/src/test/java/com/alibaba/jstorm/beam/translation/translator/ParDoTest.java
+++ /dev/null
@@ -1,626 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.translator;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-
-import com.alibaba.jstorm.beam.TestJStormRunner;
-import com.google.common.base.MoreObjects;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.*;
-import org.apache.beam.sdk.io.GenerateSequence;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.state.*;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.UsesMapState;
-import org.apache.beam.sdk.testing.UsesStatefulParDo;
-import org.apache.beam.sdk.testing.ValidatesRunner;
-import org.apache.beam.sdk.transforms.*;
-import org.apache.beam.sdk.transforms.windowing.*;
-import org.apache.beam.sdk.values.*;
-import org.joda.time.Duration;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.util.*;
-
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-/**
- * Tests for {@link ParDo} with {@link com.alibaba.jstorm.beam.StormRunner}.
- */
-@RunWith(JUnit4.class)
-public class ParDoTest implements Serializable {
-
-    @Test
-    public void testParDo() throws IOException {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        PCollection<String> output = pipeline
-                .apply(Create.of(inputs))
-                .apply(ParDo.of(new TestDoFn()));
-
-        PAssert.that(output)
-                .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testParDoWithSideInputs() throws IOException {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        PCollectionView<Integer> sideInput1 = pipeline
-                .apply("CreateSideInput1", Create.of(11))
-                .apply("ViewSideInput1", View.<Integer>asSingleton());
-        PCollectionView<Integer> sideInputUnread = pipeline
-                .apply("CreateSideInputUnread", Create.of(-3333))
-                .apply("ViewSideInputUnread", View.<Integer>asSingleton());
-
-        PCollectionView<Integer> sideInput2 = pipeline
-                .apply("CreateSideInput2", Create.of(222))
-                .apply("ViewSideInput2", View.<Integer>asSingleton());
-        PCollection<String> output = pipeline
-                .apply(Create.of(inputs))
-                .apply(ParDo.of(new TestDoFn(
-                                Arrays.asList(sideInput1, sideInput2),
-                                Arrays.<TupleTag<String>>asList()))
-                        .withSideInputs(sideInput1, sideInputUnread, sideInput2));
-
-        PAssert.that(output)
-                .satisfies(ParDoTest.HasExpectedOutput
-                        .forInput(inputs)
-                        .andSideInputs(11, 222));
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testParDoWithTaggedOutput() {
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-        TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
-        TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
-        TupleTag<String> additionalOutputTag3 = new TupleTag<String>("additional3"){};
-        TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
-
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        PCollectionTuple outputs = pipeline
-            .apply(Create.of(inputs))
-            .apply(ParDo
-                .of(new TestDoFn(
-                    Arrays.<PCollectionView<Integer>>asList(),
-                    Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
-                .withOutputTags(
-                    mainOutputTag,
-                    TupleTagList.of(additionalOutputTag3)
-                        .and(additionalOutputTag1)
-                        .and(additionalOutputTagUnwritten)
-                        .and(additionalOutputTag2)));
-
-        PAssert.that(outputs.get(mainOutputTag))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
-
-        PAssert.that(outputs.get(additionalOutputTag1))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag1));
-        PAssert.that(outputs.get(additionalOutputTag2))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag2));
-        PAssert.that(outputs.get(additionalOutputTag3))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag3));
-        PAssert.that(outputs.get(additionalOutputTagUnwritten)).empty();
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testNoWindowFnDoesNotReassignWindows() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final PCollection<Long> initialWindows =
-                pipeline
-                    .apply(GenerateSequence.from(0).to(10))
-                    .apply("AssignWindows", Window.into(new WindowOddEvenBuckets()));
-
-        // Sanity check the window assignment to demonstrate the baseline
-        PAssert.that(initialWindows)
-                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
-                .containsInAnyOrder(0L, 2L, 4L, 6L, 8L);
-        PAssert.that(initialWindows)
-                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
-                .containsInAnyOrder(1L, 3L, 5L, 7L, 9L);
-
-        PCollection<Boolean> upOne =
-                initialWindows.apply(
-                        "ModifyTypes",
-                        MapElements.<Long, Boolean>via(
-                                new SimpleFunction<Long, Boolean>() {
-                                    @Override
-                                    public Boolean apply(Long input) {
-                                        return input % 2 == 0;
-                                    }
-                                }));
-        PAssert.that(upOne)
-                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
-                .containsInAnyOrder(true, true, true, true, true);
-        PAssert.that(upOne)
-                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
-                .containsInAnyOrder(false, false, false, false, false);
-
-        // The elements should be in the same windows, even though they would not be assigned to the
-        // same windows with the updated timestamps. If we try to apply the original WindowFn, the type
-        // will not be appropriate and the runner should crash, as a Boolean cannot be converted into
-        // a long.
-        PCollection<Boolean> updatedTrigger =
-                upOne.apply(
-                        "UpdateWindowingStrategy",
-                        Window.<Boolean>configure().triggering(Never.ever())
-                                .withAllowedLateness(Duration.ZERO)
-                                .accumulatingFiredPanes());
-        pipeline.run();
-    }
-
-    @Test
-    public void testValueStateSameId() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-
-        DoFn<KV<String, Integer>, KV<String, Integer>> fn =
-                new DoFn<KV<String, Integer>, KV<String, Integer>>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 0);
-                        c.output(KV.of("sizzle", currentValue));
-                        state.write(currentValue + 1);
-                    }
-                };
-
-        DoFn<KV<String, Integer>, Integer> fn2 =
-                new DoFn<KV<String, Integer>, Integer>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 13);
-                        c.output(currentValue);
-                        state.write(currentValue + 13);
-                    }
-                };
-
-        PCollection<KV<String, Integer>> intermediate =
-                pipeline.apply(Create.of(KV.of("hello", 42), KV.of("hello", 97), KV.of("hello", 84)))
-                        .apply("First stateful ParDo", ParDo.of(fn));
-
-        PCollection<Integer> output =
-                intermediate.apply("Second stateful ParDo", ParDo.of(fn2));
-
-        PAssert.that(intermediate)
-                .containsInAnyOrder(KV.of("sizzle", 0), KV.of("sizzle", 1), KV.of("sizzle", 2));
-        PAssert.that(output).containsInAnyOrder(13, 26, 39);
-        pipeline.run();
-    }
-
-    @Test
-    @Category({ValidatesRunner.class, UsesStatefulParDo.class})
-    public void testValueStateTaggedOutput() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-
-        final TupleTag<Integer> evenTag = new TupleTag<Integer>() {};
-        final TupleTag<Integer> oddTag = new TupleTag<Integer>() {};
-
-        DoFn<KV<String, Integer>, Integer> fn =
-                new DoFn<KV<String, Integer>, Integer>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 0);
-                        if (currentValue % 2 == 0) {
-                            c.output(currentValue);
-                        } else {
-                            c.output(oddTag, currentValue);
-                        }
-                        state.write(currentValue + 1);
-                    }
-                };
-
-        PCollectionTuple output =
-                pipeline.apply(
-                        Create.of(
-                                KV.of("hello", 42),
-                                KV.of("hello", 97),
-                                KV.of("hello", 84),
-                                KV.of("goodbye", 33),
-                                KV.of("hello", 859),
-                                KV.of("goodbye", 83945)))
-                        .apply(ParDo.of(fn).withOutputTags(evenTag, TupleTagList.of(oddTag)));
-
-        PCollection<Integer> evens = output.get(evenTag);
-        PCollection<Integer> odds = output.get(oddTag);
-
-        // There are 0 and 2 from "hello" and just 0 from "goodbye"
-        PAssert.that(evens).containsInAnyOrder(0, 2, 0);
-
-        // There are 1 and 3 from "hello" and just "1" from "goodbye"
-        PAssert.that(odds).containsInAnyOrder(1, 3, 1);
-        pipeline.run();
-    }
-
-    @Test
-    @Category({ValidatesRunner.class, UsesStatefulParDo.class, UsesMapState.class})
-    public void testMapStateCoderInference() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-        final String countStateId = "count";
-        Coder<MyInteger> myIntegerCoder = MyIntegerCoder.of();
-        pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);
-
-        DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>> fn =
-                new DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<MapState<String, MyInteger>> mapState = StateSpecs.map();
-
-                    @StateId(countStateId)
-                    private final StateSpec<CombiningState<Integer, int[], Integer>>
-                            countState = StateSpecs.combiningFromInputInternal(VarIntCoder.of(),
-                            Sum.ofIntegers());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) MapState<String, MyInteger> state,
-                            @StateId(countStateId) CombiningState<Integer, int[], Integer>
-                                    count) {
-                        KV<String, Integer> value = c.element().getValue();
-                        state.put(value.getKey(), new MyInteger(value.getValue()));
-                        count.add(1);
-                        if (count.read() >= 4) {
-                            Iterable<Map.Entry<String, MyInteger>> iterate = state.entries().read();
-                            for (Map.Entry<String, MyInteger> entry : iterate) {
-                                c.output(KV.of(entry.getKey(), entry.getValue()));
-                            }
-                        }
-                    }
-                };
-
-        PCollection<KV<String, MyInteger>> output =
-                pipeline.apply(
-                        Create.of(
-                                KV.of("hello", KV.of("a", 97)), KV.of("hello", KV.of("b", 42)),
-                                KV.of("hello", KV.of("b", 42)), KV.of("hello", KV.of("c", 12))))
-                        .apply(ParDo.of(fn)).setCoder(KvCoder.of(StringUtf8Coder.of(), myIntegerCoder));
-
-        PAssert.that(output).containsInAnyOrder(KV.of("a", new MyInteger(97)),
-                KV.of("b", new MyInteger(42)), KV.of("c", new MyInteger(12)));
-        pipeline.run();
-    }
-
-
-    private static class WindowOddEvenBuckets extends NonMergingWindowFn<Long, IntervalWindow> {
-        private static final IntervalWindow EVEN_WINDOW =
-                new IntervalWindow(
-                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp());
-        private static final IntervalWindow ODD_WINDOW =
-                new IntervalWindow(
-                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp().minus(1));
-
-        @Override
-        public Collection<IntervalWindow> assignWindows(AssignContext c) throws Exception {
-            if (c.element() % 2 == 0) {
-                return Collections.singleton(EVEN_WINDOW);
-            }
-            return Collections.singleton(ODD_WINDOW);
-        }
-
-        @Override
-        public boolean isCompatible(WindowFn<?, ?> other) {
-            return other instanceof WindowOddEvenBuckets;
-        }
-
-        @Override
-        public Coder<IntervalWindow> windowCoder() {
-            return new IntervalWindow.IntervalWindowCoder();
-        }
-
-        @Override
-        public WindowMappingFn<IntervalWindow> getDefaultWindowMappingFn() {
-            throw new UnsupportedOperationException(
-                    String.format("Can't use %s for side inputs", getClass().getSimpleName()));
-        }
-    }
-
-
-    static class TestDoFn extends DoFn<Integer, String> {
-        enum State {NOT_SET_UP, UNSTARTED, STARTED, PROCESSING, FINISHED}
-
-        State state = State.NOT_SET_UP;
-
-        final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
-        final List<TupleTag<String>> additionalOutputTupleTags = new ArrayList<>();
-
-        public TestDoFn() {
-        }
-
-        public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
-                        List<TupleTag<String>> additionalOutputTupleTags) {
-            this.sideInputViews.addAll(sideInputViews);
-            this.additionalOutputTupleTags.addAll(additionalOutputTupleTags);
-        }
-
-        @Setup
-        public void prepare() {
-            assertEquals(State.NOT_SET_UP, state);
-            state = State.UNSTARTED;
-        }
-
-        @StartBundle
-        public void startBundle() {
-            assertThat(state,
-                anyOf(equalTo(State.UNSTARTED), equalTo(State.FINISHED)));
-
-            state = State.STARTED;
-        }
-
-        @ProcessElement
-        public void processElement(ProcessContext c) {
-            System.out.println("Recv elem: " + c.element());
-            assertThat(state,
-                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
-            state = State.PROCESSING;
-            outputToAllWithSideInputs(c, "processing: " + c.element());
-        }
-
-        @FinishBundle
-        public void finishBundle(FinishBundleContext c) {
-            assertThat(state,
-                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
-            state = State.FINISHED;
-            c.output("finished", BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE);
-            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
-                c.output(
-                    additionalOutputTupleTag,
-                    additionalOutputTupleTag.getId() + ": " + "finished",
-                    BoundedWindow.TIMESTAMP_MIN_VALUE,
-                    GlobalWindow.INSTANCE);
-            }
-        }
-
-        private void outputToAllWithSideInputs(ProcessContext c, String value) {
-            if (!sideInputViews.isEmpty()) {
-                List<Integer> sideInputValues = new ArrayList<>();
-                for (PCollectionView<Integer> sideInputView : sideInputViews) {
-                    sideInputValues.add(c.sideInput(sideInputView));
-                }
-                value += ": " + sideInputValues;
-            }
-            c.output(value);
-            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
-                c.output(additionalOutputTupleTag,
-                    additionalOutputTupleTag.getId() + ": " + value);
-            }
-        }
-    }
-
-    private static class MyInteger implements Comparable<MyInteger> {
-        private final int value;
-
-        MyInteger(int value) {
-            this.value = value;
-        }
-
-        public int getValue() {
-            return value;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-
-            if (!(o instanceof MyInteger)) {
-                return false;
-            }
-
-            MyInteger myInteger = (MyInteger) o;
-
-            return value == myInteger.value;
-
-        }
-
-        @Override
-        public int hashCode() {
-            return value;
-        }
-
-        @Override
-        public int compareTo(MyInteger o) {
-            return Integer.compare(this.getValue(), o.getValue());
-        }
-
-        @Override
-        public String toString() {
-            return "MyInteger{" + "value=" + value + '}';
-        }
-    }
-
-    private static class MyIntegerCoder extends AtomicCoder<MyInteger> {
-        private static final MyIntegerCoder INSTANCE = new MyIntegerCoder();
-
-        private final VarIntCoder delegate = VarIntCoder.of();
-
-        public static MyIntegerCoder of() {
-            return INSTANCE;
-        }
-
-        @Override
-        public void encode(MyInteger value, OutputStream outStream)
-                throws CoderException, IOException {
-            delegate.encode(value.getValue(), outStream);
-        }
-
-        @Override
-        public MyInteger decode(InputStream inStream) throws CoderException,
-                IOException {
-            return new MyInteger(delegate.decode(inStream));
-        }
-    }
-
-    /** PAssert "matcher" for expected output. */
-    static class HasExpectedOutput
-        implements SerializableFunction<Iterable<String>, Void>, Serializable {
-        private final List<Integer> inputs;
-        private final List<Integer> sideInputs;
-        private final String additionalOutput;
-        private final boolean ordered;
-
-        public static HasExpectedOutput forInput(List<Integer> inputs) {
-            return new HasExpectedOutput(
-                new ArrayList<Integer>(inputs),
-                new ArrayList<Integer>(),
-                "",
-                false);
-        }
-
-        private HasExpectedOutput(List<Integer> inputs,
-                                  List<Integer> sideInputs,
-                                  String additionalOutput,
-                                  boolean ordered) {
-            this.inputs = inputs;
-            this.sideInputs = sideInputs;
-            this.additionalOutput = additionalOutput;
-            this.ordered = ordered;
-        }
-
-        public HasExpectedOutput andSideInputs(Integer... sideInputValues) {
-            List<Integer> sideInputs = new ArrayList<>();
-            for (Integer sideInputValue : sideInputValues) {
-                sideInputs.add(sideInputValue);
-            }
-            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, ordered);
-        }
-
-        public HasExpectedOutput fromOutput(TupleTag<String> outputTag) {
-            return fromOutput(outputTag.getId());
-        }
-        public HasExpectedOutput fromOutput(String outputId) {
-            return new HasExpectedOutput(inputs, sideInputs, outputId, ordered);
-        }
-
-        public HasExpectedOutput inOrder() {
-            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, true);
-        }
-
-        @Override
-        public Void apply(Iterable<String> outputs) {
-            List<String> processeds = new ArrayList<>();
-            List<String> finisheds = new ArrayList<>();
-            for (String output : outputs) {
-                if (output.contains("finished")) {
-                    finisheds.add(output);
-                } else {
-                    processeds.add(output);
-                }
-            }
-
-            String sideInputsSuffix;
-            if (sideInputs.isEmpty()) {
-                sideInputsSuffix = "";
-            } else {
-                sideInputsSuffix = ": " + sideInputs;
-            }
-
-            String additionalOutputPrefix;
-            if (additionalOutput.isEmpty()) {
-                additionalOutputPrefix = "";
-            } else {
-                additionalOutputPrefix = additionalOutput + ": ";
-            }
-
-            List<String> expectedProcesseds = new ArrayList<>();
-            for (Integer input : inputs) {
-                expectedProcesseds.add(
-                    additionalOutputPrefix + "processing: " + input + sideInputsSuffix);
-            }
-            String[] expectedProcessedsArray =
-                expectedProcesseds.toArray(new String[expectedProcesseds.size()]);
-            if (!ordered || expectedProcesseds.isEmpty()) {
-                assertThat(processeds, containsInAnyOrder(expectedProcessedsArray));
-            } else {
-                assertThat(processeds, contains(expectedProcessedsArray));
-            }
-
-            for (String finished : finisheds) {
-                assertEquals(additionalOutputPrefix + "finished", finished);
-            }
-
-            return null;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
new file mode 100644
index 0000000..11c7c94
--- /dev/null
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import avro.shaded.com.google.common.collect.Maps;
+import org.apache.beam.runners.jstorm.translation.runtime.TimerServiceImpl;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
+import com.alibaba.jstorm.utils.KryoSerializer;
+
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.core.StateTags;
+import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.state.*;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Tests for {@link JStormStateInternals}.
+ */
+@RunWith(JUnit4.class)
+public class JStormStateInternalsTest {
+
+    @Rule
+    public final TemporaryFolder tmp = new TemporaryFolder();
+
+    private JStormStateInternals<String> jstormStateInternals;
+
+    @Before
+    public void setup() throws Exception {
+        IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
+                Maps.newHashMap(),
+                "test",
+                tmp.toString(),
+                new KryoSerializer(Maps.newHashMap()));
+        jstormStateInternals = new JStormStateInternals("key-1", kvStoreManager, new TimerServiceImpl(), 0);
+    }
+
+    @Test
+    public void testValueState() throws Exception {
+        ValueState<Integer> valueState = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+        valueState.write(Integer.MIN_VALUE);
+        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+        valueState.write(Integer.MAX_VALUE);
+        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+    }
+
+    @Test
+    public void testValueStateIdenticalId() throws Exception {
+        ValueState<Integer> valueState = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+        ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+
+        valueState.write(Integer.MIN_VALUE);
+        assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+        assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
+        valueState.write(Integer.MAX_VALUE);
+        assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+        assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
+    }
+
+    @Test
+    public void testBagState() throws Exception {
+        BagState<Integer> bagStateA = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+        BagState<Integer> bagStateB = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
+
+        bagStateA.add(1);
+        bagStateA.add(0);
+        bagStateA.add(Integer.MAX_VALUE);
+
+        bagStateB.add(0);
+        bagStateB.add(Integer.MIN_VALUE);
+
+        Iterable<Integer> bagA = bagStateA.read();
+        Iterable<Integer> bagB = bagStateB.read();
+        assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
+        assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
+
+        bagStateA.clear();
+        bagStateA.add(1);
+        bagStateB.add(0);
+        assertThat(bagStateA.read(), containsInAnyOrder(1));
+        assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
+    }
+
+    @Test
+    public void testCombiningState() throws Exception {
+        Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
+        Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
+            CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
+
+        CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
+                StateNamespaces.global(),
+                StateTags.combiningValue(
+                        "state-id-a",
+                        accumCoder,
+                        combineFn));
+        assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
+        combiningState.add(10);
+        assertEquals(10, combiningState.read().longValue());
+        combiningState.add(1);
+        assertEquals(10, combiningState.read().longValue());
+        combiningState.add(Integer.MAX_VALUE);
+        assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
+    }
+
+    @Test
+    public void testWatermarkHoldState() throws Exception {
+        WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
+                StateNamespaces.global(),
+                StateTags.watermarkStateInternal(
+                        "state-id-a",
+                        TimestampCombiner.EARLIEST));
+        watermarkHoldState.add(new Instant(1));
+        assertEquals(1, watermarkHoldState.read().getMillis());
+        watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
+        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+        watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
+        assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+    }
+
+    @Test
+    public void testMapState() throws Exception {
+        MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
+        mapStateA.put(1, 1);
+        mapStateA.put(2, 22);
+        mapStateA.put(1, 12);
+
+        Iterable<Integer> keys = mapStateA.keys().read();
+        Iterable<Integer> values = mapStateA.values().read();
+        assertThat(keys, containsInAnyOrder(1, 2));
+        assertThat(values, containsInAnyOrder(12, 22));
+
+        Iterable<Map.Entry<Integer, Integer>> entries =  mapStateA.entries().read();
+        Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
+        Map.Entry<Integer, Integer> entry = itr.next();
+        assertEquals((long) entry.getKey(), 1l);
+        assertEquals((long) entry.getValue(), 12l);
+        entry = itr.next();
+        assertEquals((long) entry.getKey(), 2l);
+        assertEquals((long) entry.getValue(), 22l);
+        assertEquals(false, itr.hasNext());
+
+        mapStateA.remove(1);
+        keys = mapStateA.keys().read();
+        values = mapStateA.values().read();
+        assertThat(keys, containsInAnyOrder(2));
+        assertThat(values, containsInAnyOrder(22));
+
+        entries =  mapStateA.entries().read();
+        itr = entries.iterator();
+        entry = itr.next();
+        assertEquals((long) entry.getKey(), 2l);
+        assertEquals((long) entry.getValue(), 22l);
+        assertEquals(false, itr.hasNext());
+    }
+
+    @Test
+    public void testMassiveDataOfBagState() {
+        BagState<Integer> bagStateA = jstormStateInternals.state(
+                StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+
+        int count = 10000;
+        int n = 1;
+        while(n <= count) {
+            bagStateA.add(n);
+            n++;
+        }
+
+        int readCount = 0;
+        int readN = 0;
+        Iterator<Integer> itr = bagStateA.read().iterator();
+        while(itr.hasNext()) {
+            readN += itr.next();
+            readCount++;
+        }
+
+        assertEquals((long) readN, ((1 + count) * count) / 2);
+        assertEquals((long) readCount, count);
+    }
+}


[20/53] [abbrv] beam git commit: jstorm-runner: Fix checkstyle error

Posted by pe...@apache.org.
jstorm-runner: Fix checkstyle error


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/5a15d548
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/5a15d548
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/5a15d548

Branch: refs/heads/jstorm-runner
Commit: 5a15d5488f9438695948e72af08ada4c263471d7
Parents: 78a5076
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Fri Jul 14 14:14:49 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:57 2017 +0800

----------------------------------------------------------------------
 .../runners/jstorm/JStormPipelineOptions.java   |   3 +
 .../beam/runners/jstorm/JStormRunner.java       |  16 +-
 .../beam/runners/jstorm/TestJStormRunner.java   |   2 +-
 .../serialization/ImmutableListSerializer.java  |   4 +-
 .../serialization/ImmutableMapSerializer.java   |   3 +
 .../serialization/ImmutableSetSerializer.java   |   3 +
 .../KvStoreIterableSerializer.java              |   3 +
 .../SdkRepackImmuListSerializer.java            |   3 +
 .../SdkRepackImmuSetSerializer.java             |   3 +
 .../UnmodifiableCollectionsSerializer.java      |   5 +-
 .../translation/JStormPipelineTranslator.java   | 186 +++++++++++++++++++
 .../translation/StormPipelineTranslator.java    | 186 -------------------
 .../jstorm/translation/TranslationContext.java  |   9 +-
 .../translation/runtime/AbstractComponent.java  |   4 +-
 .../translation/runtime/AdaptorBasicBolt.java   |   5 +-
 .../translation/runtime/AdaptorBasicSpout.java  |   5 +-
 .../translation/runtime/DoFnExecutor.java       |  16 +-
 .../jstorm/translation/runtime/Executor.java    |   7 +-
 .../translation/runtime/ExecutorContext.java    |   3 +
 .../translation/runtime/ExecutorsBolt.java      |  15 +-
 .../translation/runtime/FlattenExecutor.java    |   6 +-
 .../runtime/GroupByWindowExecutor.java          |   5 +
 .../runtime/MultiOutputDoFnExecutor.java        |   7 +-
 .../runtime/MultiStatefulDoFnExecutor.java      |   4 +
 .../runtime/StatefulDoFnExecutor.java           |   4 +
 .../translation/runtime/TimerServiceImpl.java   |   8 +-
 .../translation/runtime/TxExecutorsBolt.java    |   5 +-
 .../runtime/TxUnboundedSourceSpout.java         |   5 +-
 .../runtime/UnboundedSourceSpout.java           |   5 +-
 .../runtime/WindowAssignExecutor.java           |   7 +-
 .../runtime/state/JStormBagState.java           |   5 +-
 .../runtime/state/JStormMapState.java           |   7 +-
 .../translator/FlattenTranslator.java           |   6 +-
 .../translator/GroupByKeyTranslator.java        |   5 +
 .../translator/ParDoBoundMultiTranslator.java   |   2 +-
 .../translator/ParDoBoundTranslator.java        |   4 +-
 .../jstorm/translation/translator/Stream.java   |  11 +-
 .../translator/TransformTranslator.java         |   4 +
 .../translation/translator/ViewTranslator.java  |  18 +-
 .../translator/WindowAssignTranslator.java      |   7 +-
 .../jstorm/translation/util/CommonInstance.java |   5 +-
 .../beam/runners/jstorm/util/RunnerUtils.java   |  12 +-
 .../jstorm/util/SerializedPipelineOptions.java  |   2 +-
 .../jstorm/util/SingletonKeyedWorkItem.java     |   3 +-
 .../runtime/state/JStormStateInternalsTest.java |  14 +-
 45 files changed, 384 insertions(+), 258 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
index 2a87756..114877a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormPipelineOptions.java
@@ -64,6 +64,9 @@ public interface JStormPipelineOptions extends PipelineOptions {
   Map getParallelismNumMap();
   void setParallelismNumMap(Map parallelismNumMap);
 
+  /**
+   * Default value factory for topology configuration of JStorm.
+   */
   class DefaultMapValueFactory implements DefaultValueFactory<Map> {
     @Override
     public Map create(PipelineOptions pipelineOptions) {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 5375d6e..00ec7f6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -78,17 +78,17 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
   }
 
   /**
-   * convert pipeline options to storm configuration format
-   *
+   * convert pipeline options to storm configuration format.
    * @param options
    * @return
    */
   private Config convertPipelineOptionsToConfig(JStormPipelineOptions options) {
     Config config = new Config();
-    if (options.getLocalMode())
+    if (options.getLocalMode()) {
       config.put(Config.STORM_CLUSTER_MODE, "local");
-    else
+    } else {
       config.put(Config.STORM_CLUSTER_MODE, "distributed");
+    }
 
     Config.setNumWorkers(config, options.getWorkerNumber());
 
@@ -161,8 +161,9 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
       component = spout;
     } else {
       AdaptorBasicBolt bolt = context.getBolt(id);
-      if (bolt != null)
+      if (bolt != null) {
         component = bolt;
+      }
     }
 
     return component;
@@ -202,10 +203,11 @@ public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
 
       // add stream output declare for "from" component
       AbstractComponent component = getComponent(srcBoltId, context);
-      if (grouping.getType().equals(Stream.Grouping.Type.FIELDS))
+      if (grouping.getType().equals(Stream.Grouping.Type.FIELDS)) {
         component.addKVOutputField(streamId);
-      else
+      } else {
         component.addOutputField(streamId);
+      }
 
       // "to" component declares grouping to "from" component
       switch (grouping.getType()) {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index e27efc0..b1b0379 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -2,7 +2,6 @@ package org.apache.beam.runners.jstorm;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import avro.shaded.com.google.common.collect.Maps;
 import com.alibaba.jstorm.common.metric.AsmMetric;
 import com.alibaba.jstorm.metric.AsmMetricRegistry;
 import com.alibaba.jstorm.metric.AsmWindow;
@@ -11,6 +10,7 @@ import com.alibaba.jstorm.metric.MetaType;
 import com.alibaba.jstorm.metric.MetricType;
 import com.alibaba.jstorm.utils.JStormUtils;
 import com.google.common.base.Optional;
+import com.google.common.collect.Maps;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
index c479f26..268774c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
@@ -10,8 +10,10 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableTable;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Table;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
 
+/**
+ * Specific serializer of {@link Kryo} for ImmutableList.
+ */
 public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
 
   private static final boolean DOES_NOT_ACCEPT_NULL = false;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
index 77eede3..6b998fc 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
@@ -11,6 +11,9 @@ import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Map;
 
+/**
+ * Specific serializer of {@link Kryo} for ImmutableMap.
+ */
 public class ImmutableMapSerializer extends Serializer<ImmutableMap<Object, ? extends Object>> {
 
   private static final boolean DOES_NOT_ACCEPT_NULL = true;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
index 3a43b2b..edc7b09 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
@@ -8,6 +8,9 @@ import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
 
+/**
+ * Specific serializer of {@link Kryo} for ImmutableSet.
+ */
 public class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
 
   private static final boolean DOES_NOT_ACCEPT_NULL = false;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
index b47f3b7..3835816 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
@@ -9,6 +9,9 @@ import com.google.common.collect.Lists;
 import java.util.Iterator;
 import java.util.List;
 
+/**
+ * Specific serializer of {@link Kryo} for KvStoreIterable.
+ */
 public class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
 
   public KvStoreIterableSerializer() {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
index dd4272c..f1ed644 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
@@ -12,6 +12,9 @@ import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableTable;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Lists;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Table;
 
+/**
+ * Specific serializer of {@link Kryo} for Beam SDK repackaged ImmutableList.
+ */
 public class SdkRepackImmuListSerializer extends Serializer<ImmutableList<Object>> {
 
   private static final boolean DOES_NOT_ACCEPT_NULL = false;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
index 6973c82..d1ed046 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
@@ -8,6 +8,9 @@ import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableSet;
 import org.apache.beam.sdk.repackaged.com.google.common.collect.Sets;
 
+/**
+ * Specific serializer of {@link Kryo} for Beam SDK repackaged ImmutableSet.
+ */
 public class SdkRepackImmuSetSerializer extends Serializer<ImmutableSet<Object>> {
 
   private static final boolean DOES_NOT_ACCEPT_NULL = false;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
index bcee778..33343fc 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
@@ -21,6 +21,9 @@ import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+/**
+ * Specific serializer of {@link Kryo} for Unmodifiable Collection.
+ */
 public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
 
   private static final Field SOURCE_COLLECTION_FIELD;
@@ -83,7 +86,7 @@ public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
     }
   }
 
-  private static enum UnmodifiableCollection {
+  private enum UnmodifiableCollection {
     COLLECTION(
         Collections.unmodifiableCollection(Arrays.asList("")).getClass(),
         SOURCE_COLLECTION_FIELD) {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
new file mode 100644
index 0000000..1449a43
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormPipelineTranslator.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import java.util.List;
+import org.apache.beam.runners.core.construction.PTransformMatchers;
+import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
+import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
+import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.runners.AppliedPTransform;
+import org.apache.beam.sdk.runners.PTransformOverride;
+import org.apache.beam.sdk.runners.TransformHierarchy;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.util.InstanceBuilder;
+import org.apache.beam.sdk.values.PValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Pipleline translator of JStorm.
+ */
+public class JStormPipelineTranslator extends Pipeline.PipelineVisitor.Defaults {
+  private static final Logger LOG = LoggerFactory.getLogger(JStormPipelineTranslator.class);
+  private TranslationContext context;
+  private int depth = 0;
+
+  public JStormPipelineTranslator(TranslationContext context) {
+    this.context = context;
+  }
+
+  public void translate(Pipeline pipeline) {
+    List<PTransformOverride> transformOverrides =
+        ImmutableList.<PTransformOverride>builder()
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
+            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
+                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
+            .add(PTransformOverride.of(
+                PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
+                new ReflectiveOneToOneOverrideFactory(
+                    (ViewTranslator.CombineGloballyAsSingletonView.class))))
+            .build();
+    pipeline.replaceAll(transformOverrides);
+    pipeline.traverseTopologically(this);
+  }
+
+  @Override
+  public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
+    this.depth++;
+
+    // check if current composite transforms need to be translated.
+    // If not, all sub transforms will be translated in visitPrimitiveTransform.
+    PTransform<?, ?> transform = node.getTransform();
+    if (transform != null) {
+      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+
+      if (translator != null && applyCanTranslate(transform, node, translator)) {
+        applyStreamingTransform(transform, node, translator);
+        LOG.info(genSpaces(this.depth) + "translated-" + node);
+        return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
+      }
+    }
+    return CompositeBehavior.ENTER_TRANSFORM;
+  }
+
+  public void leaveCompositeTransform(TransformHierarchy.Node node) {
+    this.depth--;
+    LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
+  }
+
+  public void visitPrimitiveTransform(TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
+
+    if (!node.isRootNode()) {
+      PTransform<?, ?> transform = node.getTransform();
+      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
+      if (translator == null || !applyCanTranslate(transform, node, translator)) {
+        LOG.info(node.getTransform().getClass().toString());
+        throw new UnsupportedOperationException(
+            "The transform " + transform + " is currently not supported.");
+      }
+      applyStreamingTransform(transform, node, translator);
+    }
+  }
+
+  public void visitValue(PValue value, TransformHierarchy.Node node) {
+    LOG.info(genSpaces(this.depth) + "visiting value {}", value);
+  }
+
+  private <T extends PTransform<?, ?>> void applyStreamingTransform(
+      PTransform<?, ?> transform,
+      TransformHierarchy.Node node,
+      TransformTranslator<?> translator) {
+    @SuppressWarnings("unchecked")
+    T typedTransform = (T) transform;
+    @SuppressWarnings("unchecked")
+    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+    typedTranslator.translateNode(typedTransform, context);
+
+    // Maintain PValue to TupleTag map for side inputs translation.
+    context.getUserGraphContext().recordOutputTaggedPValue();
+  }
+
+  private <T extends PTransform<?, ?>> boolean applyCanTranslate(
+      PTransform<?, ?> transform,
+      TransformHierarchy.Node node,
+      TransformTranslator<?> translator) {
+    @SuppressWarnings("unchecked")
+    T typedTransform = (T) transform;
+    @SuppressWarnings("unchecked")
+    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
+
+    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
+
+    return typedTranslator.canTranslate(typedTransform, context);
+  }
+
+  /**
+   * Utility formatting method.
+   *
+   * @param n number of spaces to generate
+   * @return String with "|" followed by n spaces
+   */
+  protected static String genSpaces(int n) {
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < n; i++) {
+      builder.append("|   ");
+    }
+    return builder.toString();
+  }
+
+  private static class ReflectiveOneToOneOverrideFactory<
+      InputT extends PValue,
+      OutputT extends PValue,
+      TransformT extends PTransform<InputT, OutputT>>
+      extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
+    private final Class<PTransform<InputT, OutputT>> replacement;
+
+    private ReflectiveOneToOneOverrideFactory(
+        Class<PTransform<InputT, OutputT>> replacement) {
+      this.replacement = replacement;
+    }
+
+    @Override
+    public PTransformReplacement<InputT, OutputT> getReplacementTransform(
+        AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
+      PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
+      PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
+          .withArg(
+              (Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(),
+              originalPTransform)
+          .build();
+      InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
+      return PTransformReplacement.of(inputT, replacedPTransform);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
deleted file mode 100644
index 6d6f1c6..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/StormPipelineTranslator.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-import java.util.List;
-import org.apache.beam.runners.core.construction.PTransformMatchers;
-import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
-import org.apache.beam.runners.jstorm.translation.translator.TransformTranslator;
-import org.apache.beam.runners.jstorm.translation.translator.ViewTranslator;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.runners.AppliedPTransform;
-import org.apache.beam.sdk.runners.PTransformOverride;
-import org.apache.beam.sdk.runners.TransformHierarchy;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.util.InstanceBuilder;
-import org.apache.beam.sdk.values.PValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Pipleline translator of Storm
- */
-public class StormPipelineTranslator extends Pipeline.PipelineVisitor.Defaults {
-  private static final Logger LOG = LoggerFactory.getLogger(StormPipelineTranslator.class);
-  private TranslationContext context;
-  private int depth = 0;
-
-  public StormPipelineTranslator(TranslationContext context) {
-    this.context = context;
-  }
-
-  public void translate(Pipeline pipeline) {
-    List<PTransformOverride> transformOverrides =
-        ImmutableList.<PTransformOverride>builder()
-            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsIterable.class),
-                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsIterable.class)))
-            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsList.class),
-                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsList.class)))
-            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMap.class),
-                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMap.class)))
-            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsMultimap.class),
-                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsMultimap.class)))
-            .add(PTransformOverride.of(PTransformMatchers.classEqualTo(View.AsSingleton.class),
-                new ReflectiveOneToOneOverrideFactory(ViewTranslator.ViewAsSingleton.class)))
-            .add(PTransformOverride.of(
-                PTransformMatchers.classEqualTo(Combine.GloballyAsSingletonView.class),
-                new ReflectiveOneToOneOverrideFactory(
-                    (ViewTranslator.CombineGloballyAsSingletonView.class))))
-            .build();
-    pipeline.replaceAll(transformOverrides);
-    pipeline.traverseTopologically(this);
-  }
-
-  @Override
-  public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
-    LOG.info(genSpaces(this.depth) + "enterCompositeTransform- " + node);
-    this.depth++;
-
-    // check if current composite transforms need to be translated.
-    // If not, all sub transforms will be translated in visitPrimitiveTransform.
-    PTransform<?, ?> transform = node.getTransform();
-    if (transform != null) {
-      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-
-      if (translator != null && applyCanTranslate(transform, node, translator)) {
-        applyStreamingTransform(transform, node, translator);
-        LOG.info(genSpaces(this.depth) + "translated-" + node);
-        return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
-      }
-    }
-    return CompositeBehavior.ENTER_TRANSFORM;
-  }
-
-  public void leaveCompositeTransform(TransformHierarchy.Node node) {
-    this.depth--;
-    LOG.info(genSpaces(this.depth) + "leaveCompositeTransform- " + node);
-  }
-
-  public void visitPrimitiveTransform(TransformHierarchy.Node node) {
-    LOG.info(genSpaces(this.depth) + "visitPrimitiveTransform- " + node);
-
-    if (!node.isRootNode()) {
-      PTransform<?, ?> transform = node.getTransform();
-      TransformTranslator translator = TranslatorRegistry.getTranslator(transform);
-      if (translator == null || !applyCanTranslate(transform, node, translator)) {
-        LOG.info(node.getTransform().getClass().toString());
-        throw new UnsupportedOperationException(
-            "The transform " + transform + " is currently not supported.");
-      }
-      applyStreamingTransform(transform, node, translator);
-    }
-  }
-
-  public void visitValue(PValue value, TransformHierarchy.Node node) {
-    LOG.info(genSpaces(this.depth) + "visiting value {}", value);
-  }
-
-  private <T extends PTransform<?, ?>> void applyStreamingTransform(
-      PTransform<?, ?> transform,
-      TransformHierarchy.Node node,
-      TransformTranslator<?> translator) {
-    @SuppressWarnings("unchecked")
-    T typedTransform = (T) transform;
-    @SuppressWarnings("unchecked")
-    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-    typedTranslator.translateNode(typedTransform, context);
-
-    // Maintain PValue to TupleTag map for side inputs translation.
-    context.getUserGraphContext().recordOutputTaggedPValue();
-  }
-
-  private <T extends PTransform<?, ?>> boolean applyCanTranslate(
-      PTransform<?, ?> transform,
-      TransformHierarchy.Node node,
-      TransformTranslator<?> translator) {
-    @SuppressWarnings("unchecked")
-    T typedTransform = (T) transform;
-    @SuppressWarnings("unchecked")
-    TransformTranslator<T> typedTranslator = (TransformTranslator<T>) translator;
-
-    context.getUserGraphContext().setCurrentTransform(node.toAppliedPTransform());
-
-    return typedTranslator.canTranslate(typedTransform, context);
-  }
-
-  /**
-   * Utility formatting method.
-   *
-   * @param n number of spaces to generate
-   * @return String with "|" followed by n spaces
-   */
-  protected static String genSpaces(int n) {
-    StringBuilder builder = new StringBuilder();
-    for (int i = 0; i < n; i++) {
-      builder.append("|   ");
-    }
-    return builder.toString();
-  }
-
-  private static class ReflectiveOneToOneOverrideFactory<
-      InputT extends PValue,
-      OutputT extends PValue,
-      TransformT extends PTransform<InputT, OutputT>>
-      extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
-    private final Class<PTransform<InputT, OutputT>> replacement;
-
-    private ReflectiveOneToOneOverrideFactory(
-        Class<PTransform<InputT, OutputT>> replacement) {
-      this.replacement = replacement;
-    }
-
-    @Override
-    public PTransformReplacement<InputT, OutputT> getReplacementTransform(
-        AppliedPTransform<InputT, OutputT, TransformT> appliedPTransform) {
-      PTransform<InputT, OutputT> originalPTransform = appliedPTransform.getTransform();
-      PTransform<InputT, OutputT> replacedPTransform = InstanceBuilder.ofType(replacement)
-          .withArg(
-              (Class<PTransform<InputT, OutputT>>) originalPTransform.getClass(),
-              originalPTransform)
-          .build();
-      InputT inputT = (InputT) Iterables.getOnlyElement(appliedPTransform.getInputs().values());
-      return PTransformReplacement.of(inputT, replacedPTransform);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 526352a..1230a31 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -20,11 +20,11 @@ package org.apache.beam.runners.jstorm.translation;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 
-import avro.shaded.com.google.common.collect.Lists;
 import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.base.Strings;
 import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import java.util.ArrayList;
@@ -239,7 +239,9 @@ public class TranslationContext {
     }
   }
 
-  // TODO: add getSideInputs() and getSideOutputs().
+  /**
+   * Context of user graph.
+   */
   public static class UserGraphContext {
     private final JStormPipelineOptions options;
     private final Map<PValue, TupleTag> pValueToTupleTag;
@@ -326,6 +328,9 @@ public class TranslationContext {
     }
   }
 
+  /**
+   * Context of execution graph.
+   */
   public static class ExecutionGraphContext {
 
     private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
index 68e9e17..3d7fab8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
@@ -24,7 +24,7 @@ import java.util.HashMap;
 import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
 
-/*
+/**
  * Enable user to add output stream definitions by API, rather than hard-code.
  */
 public abstract class AbstractComponent implements IComponent {
@@ -65,4 +65,4 @@ public abstract class AbstractComponent implements IComponent {
   public void setParallelismNum(int num) {
     parallelismNum = num;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
index 5e9b056..d8d4d46 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicBolt.java
@@ -19,6 +19,9 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 
 import backtype.storm.topology.IRichBatchBolt;
 
+/**
+ * Adaptor bolt of JStorm that extends {@link AbstractComponent}.
+ */
 public abstract class AdaptorBasicBolt extends AbstractComponent implements IRichBatchBolt {
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
index 0480518..814d416 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AdaptorBasicSpout.java
@@ -19,6 +19,9 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 
 import backtype.storm.topology.IRichSpout;
 
+/**
+ * Adaptor spout of JStorm that extends {@link AbstractComponent}.
+ */
 public abstract class AdaptorBasicSpout extends AbstractComponent implements IRichSpout {
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
index 9507948..e07d890 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
@@ -20,9 +20,9 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import avro.shaded.com.google.common.collect.Iterables;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.metric.MetricClient;
+import com.google.common.collect.Iterables;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -62,11 +62,19 @@ import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * JStorm {@link Executor} for {@link DoFn}.
+ * @param <InputT> input type
+ * @param <OutputT> output type
+ */
 public class DoFnExecutor<InputT, OutputT> implements Executor {
   private static final long serialVersionUID = 5297603063991078668L;
 
   private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
 
+  /**
+   * Implements {@link OutputManager} in a DoFn executor.
+   */
   public class DoFnExecutorOutputManager implements OutputManager, Serializable {
     private static final long serialVersionUID = -661113364735206170L;
 
@@ -174,7 +182,7 @@ public class DoFnExecutor<InputT, OutputT> implements Executor {
     initService(context);
 
     // Side inputs setup
-    if (sideInputs != null && sideInputs.isEmpty() == false) {
+    if (sideInputs != null && !sideInputs.isEmpty()) {
       pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
       watermarkHoldTag =
           StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
@@ -261,10 +269,10 @@ public class DoFnExecutor<InputT, OutputT> implements Executor {
   }
 
   /**
-   * Process all pushed back elements when receiving watermark with max timestamp
+   * Process all pushed back elements when receiving watermark with max timestamp.
    */
   public void processAllPushBackElements() {
-    if (sideInputs != null && sideInputs.isEmpty() == false) {
+    if (sideInputs != null && !sideInputs.isEmpty()) {
       BagState<WindowedValue<InputT>> pushedBackElements =
           pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
       if (pushedBackElements != null) {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
index 1a03cb8..0ec4fdd 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
@@ -21,13 +21,16 @@ import java.io.Serializable;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
+/**
+ * An executor is a basic executable unit in a JStorm task.
+ */
 public interface Executor extends Serializable {
   /**
-   * Initialization during runtime
+   * Initialization during runtime.
    */
   void init(ExecutorContext context);
 
   <T> void process(TupleTag<T> tag, WindowedValue<T> elem);
 
   void cleanup();
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
index 1f65921..55ca171 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
@@ -21,6 +21,9 @@ import backtype.storm.task.TopologyContext;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.google.auto.value.AutoValue;
 
+/**
+ * Runtime context of an executors bolt.
+ */
 @AutoValue
 public abstract class ExecutorContext {
   public static ExecutorContext of(

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
index e80fb48..d33c17a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
@@ -19,8 +19,6 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import avro.shaded.com.google.common.base.Joiner;
-import avro.shaded.com.google.common.collect.Sets;
 import backtype.storm.task.OutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.tuple.ITupleExt;
@@ -31,8 +29,10 @@ import com.alibaba.jstorm.cache.KvStoreManagerFactory;
 import com.alibaba.jstorm.cluster.Common;
 import com.alibaba.jstorm.utils.KryoSerializer;
 import com.google.common.base.Function;
+import com.google.common.base.Joiner;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -49,6 +49,9 @@ import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * ExecutorsBolt is a JStorm Bolt composed of several executors chained in a sub-DAG.
+ */
 public class ExecutorsBolt extends AdaptorBasicBolt {
   private static final long serialVersionUID = -7751043327801735211L;
 
@@ -129,10 +132,10 @@ public class ExecutorsBolt extends AdaptorBasicBolt {
       // init kv store manager
       String storeName = String.format("task-%d", context.getThisTaskId());
       String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-      IKvStoreManager kvStoreManager = isStatefulBolt ?
-          KvStoreManagerFactory.getKvStoreManagerWithMonitor(
-              context, storeName, stateStorePath, isStatefulBolt) :
-          KvStoreManagerFactory.getKvStoreManager(
+      IKvStoreManager kvStoreManager = isStatefulBolt
+              ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(
+              context, storeName, stateStorePath, isStatefulBolt)
+              : KvStoreManagerFactory.getKvStoreManager(
               stormConf, storeName, stateStorePath, isStatefulBolt);
       this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
 

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
index 5a07243..caf1e47 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
@@ -22,6 +22,10 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.Flatten}.
+ * @param <InputT> element type of the input
+ */
 public class FlattenExecutor<InputT> implements Executor {
 
   private final String description;
@@ -53,4 +57,4 @@ public class FlattenExecutor<InputT> implements Executor {
   public String toString() {
     return description;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
index 625726d..0dd1af9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
@@ -52,6 +52,11 @@ import org.apache.beam.sdk.values.WindowingStrategy;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.GroupByKey}.
+ * @param <K> type of the keys
+ * @param <V> type of the values
+ */
 public class GroupByWindowExecutor<K, V>
     extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
   private static final long serialVersionUID = -7563050475488610553L;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
index d36d9a6..a26472c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
@@ -30,6 +30,11 @@ import org.apache.beam.sdk.values.WindowingStrategy;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * JStorm {@link Executor} for {@link DoFn} with multi-output.
+ * @param <InputT> input element type
+ * @param <OutputT> output element type
+ */
 public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
   private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
 
@@ -71,4 +76,4 @@ public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<Input
     this.outputManager = new MultiOutputDoFnExecutorOutputManager();
     LOG.info("localTupleTagMap: {}", localTupleTagMap);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
index 45ac62a..5e87cff 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
@@ -32,6 +32,10 @@ import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
+/**
+ * JStorm {@link Executor} for stateful {@link DoFn} with multi-output.
+ * @param <OutputT> output element type
+ */
 public class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
 
   public MultiStatefulDoFnExecutor(

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
index ba0c052..77ae844 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
@@ -32,6 +32,10 @@ import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
+/**
+ * JStorm {@link Executor} for stateful {@link DoFn}.
+ * @param <OutputT> output element type
+ */
 public class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
   public StatefulDoFnExecutor(
       String stepName, String description, JStormPipelineOptions pipelineOptions,

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
index d2514f1..0103095 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
@@ -20,9 +20,9 @@ package org.apache.beam.runners.jstorm.translation.runtime;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkState;
 
-import avro.shaded.com.google.common.collect.Maps;
-import avro.shaded.com.google.common.collect.Sets;
 import com.alibaba.jstorm.utils.Pair;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -47,7 +47,7 @@ public class TimerServiceImpl implements TimerService {
   private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
   private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
   private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
-  private transient final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
+  private final transient PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
       new PriorityQueue<>();
   private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
       timerDataToKeyedExecutors = Maps.newHashMap();
@@ -152,4 +152,4 @@ public class TimerServiceImpl implements TimerService {
     keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
     timerDataToKeyedExecutors.put(timerData, keyedExecutors);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
index 2bd5f7d..8dc51b5 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
@@ -29,6 +29,9 @@ import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Transactional executors bolt handles the checkpoint and restore of state and timer.
+ */
 public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
   private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
 
@@ -127,4 +130,4 @@ public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
       throw new RuntimeException(e.getMessage());
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
index 16f7d99..48b410f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
@@ -29,6 +29,9 @@ import java.util.Map;
 import org.apache.beam.sdk.io.UnboundedSource;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Transactional unbounded source spout handles the checkpoint and restore of state and timer.
+ */
 public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
   private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
 
@@ -150,4 +153,4 @@ public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
   public void fail(Object msgId) {
     throw new UnsupportedOperationException();
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
index 7f98c61..006cd47 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
@@ -40,8 +40,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Spout implementation that wraps a Beam UnboundedSource
- * <p>
+ * Spout implementation that wraps a Beam UnboundedSource.
  * TODO: add wrapper to support metrics in UnboundedSource.
  */
 public class UnboundedSourceSpout extends AdaptorBasicSpout {
@@ -61,7 +60,7 @@ public class UnboundedSourceSpout extends AdaptorBasicSpout {
 
   private KryoSerializer<WindowedValue> serializer;
 
-  private long lastWaterMark = 0l;
+  private long lastWaterMark = 0L;
 
   public UnboundedSourceSpout(
       String description,

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
index 7f21d26..3cd0aa9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
@@ -29,6 +29,11 @@ import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.windowing.Window.Assign}.
+ * @param <T>
+ * @param <W>
+ */
 public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
   private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
 
@@ -104,4 +109,4 @@ public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executo
   public String toString() {
     return description;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
index 1466f35..df54383 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
@@ -33,7 +33,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * JStorm implementation of {@link BagState}.
+ * Implementation of {@link BagState} in JStorm runner.
  */
 class JStormBagState<K, T> implements BagState<T> {
   private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
@@ -115,6 +115,9 @@ class JStormBagState<K, T> implements BagState<T> {
     return ComposedKey.of(key, namespace, elemIndex);
   }
 
+  /**
+   * Implementation of Bag state Iterable.
+   */
   private class BagStateIterable implements KvStoreIterable<T> {
 
     private class BagStateIterator implements Iterator<T> {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
index f1c1ed0..ac3f91f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
@@ -26,6 +26,11 @@ import org.apache.beam.sdk.state.ReadableState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Implementation of {@link MapState} in JStorm runner.
+ * @param <K>
+ * @param <V>
+ */
 public class JStormMapState<K, V> implements MapState<K, V> {
   private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
 
@@ -150,4 +155,4 @@ public class JStormMapState<K, V> implements MapState<K, V> {
       return this;
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
index bf8d472..44ce8d8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
@@ -26,6 +26,10 @@ import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
 
+/**
+ * Translates a {@link Flatten} to a JStorm {@link FlattenExecutor}.
+ * @param <V>
+ */
 public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
 
   @Override
@@ -44,4 +48,4 @@ public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PC
     FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
     context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
index 85f96ce..85cb85d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
@@ -29,6 +29,11 @@ import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
+/**
+ * Translates a {@link GroupByKey} to a JStorm {@link GroupByWindowExecutor}.
+ * @param <K>
+ * @param <V>
+ */
 public class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
   // information of transform
   protected PCollection<KV<K, V>> input;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
index 77e4381..6e3392c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
@@ -17,9 +17,9 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
-import avro.shaded.com.google.common.collect.Maps;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
index 7b998d9..ad8f85f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
@@ -17,9 +17,9 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
-import avro.shaded.com.google.common.collect.Lists;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
@@ -40,7 +40,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Translates a ParDo.Bound to a Storm {@link DoFnExecutor}.
+ * Translates a ParDo.Bound to a JStorm {@link DoFnExecutor}.
  */
 public class ParDoBoundTranslator<InputT, OutputT>
     extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
index a15a8ba..71243b9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
@@ -39,6 +39,9 @@ public abstract class Stream {
         producer, consumer);
   }
 
+  /**
+   * JStorm producer.
+   */
   @AutoValue
   public abstract static class Producer {
     public abstract String getComponentId();
@@ -53,6 +56,9 @@ public abstract class Stream {
     }
   }
 
+  /**
+   * JStorm consumer.
+   */
   @AutoValue
   public abstract static class Consumer {
     public abstract String getComponentId();
@@ -65,6 +71,9 @@ public abstract class Stream {
     }
   }
 
+  /**
+   * JStorm grouping, which define how to transfer message between two nodes.
+   */
   @AutoValue
   public abstract static class Grouping {
     public abstract Type getType();
@@ -86,7 +95,7 @@ public abstract class Stream {
     }
 
     /**
-     * Types of stream groupings Storm allows
+     * Types of stream groupings Storm allows.
      */
     public enum Type {
       ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
index 487cac0..bfa94a0 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
@@ -38,6 +38,10 @@ public interface TransformTranslator<T extends PTransform<?, ?>> {
    */
   boolean canTranslate(T transform, TranslationContext context);
 
+    /**
+     * Default translator.
+     * @param <T1>
+     */
   class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
     @Override
     public void translateNode(T1 transform, TranslationContext context) {

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
index c55c8d6..f71ee9c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
@@ -54,8 +54,7 @@ public class ViewTranslator
 
   /**
    * Specialized implementation for
-   * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}
-   * for the Flink runner in streaming mode.
+   * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}.
    */
   public static class ViewAsMap<K, V>
       extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
@@ -93,8 +92,7 @@ public class ViewTranslator
 
   /**
    * Specialized expansion for {@link
-   * View.AsMultimap View.AsMultimap} for the
-   * Flink runner in streaming mode.
+   * View.AsMultimap View.AsMultimap}.
    */
   public static class ViewAsMultimap<K, V>
       extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
@@ -135,8 +133,7 @@ public class ViewTranslator
 
   /**
    * Specialized implementation for
-   * {@link View.AsList View.AsList} for the
-   * JStorm runner in streaming mode.
+   * {@link View.AsList View.AsList}.
    */
   public static class ViewAsList<T>
       extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
@@ -258,6 +255,12 @@ public class ViewTranslator
     }
   }
 
+  /**
+   * Specialized expansion for
+   * {@link org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView}.
+   * @param <InputT>
+   * @param <OutputT>
+     */
   public static class CombineGloballyAsSingletonView<InputT, OutputT>
       extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
     Combine.GloballyAsSingletonView<InputT, OutputT> transform;
@@ -351,8 +354,7 @@ public class ViewTranslator
 
   /**
    * Creates a primitive {@link PCollectionView}.
-   * <p>
-   * <p>For internal use only by runner implementors.
+   * For internal use only by runner implementors.
    *
    * @param <ElemT> The type of the elements of the input PCollection
    * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
index 6de34dd..2ccb8d7 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
@@ -21,6 +21,11 @@ import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.WindowAssignExecutor;
 import org.apache.beam.sdk.transforms.windowing.Window;
 
+/**
+ * Translates a {@link org.apache.beam.sdk.transforms.windowing.Window.Assign} to a
+ * JStorm {@link WindowAssignExecutor}.
+ * @param <T>
+ */
 public class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
 
   @Override
@@ -35,4 +40,4 @@ public class WindowAssignTranslator<T> extends TransformTranslator.Default<Windo
         userGraphContext.getOutputTag());
     context.addTransformExecutor(executor);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
index 596d8b4..4b92a4c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
@@ -17,9 +17,12 @@
  */
 package org.apache.beam.runners.jstorm.translation.util;
 
+/**
+ * Common definition of JStorm runner.
+ */
 public class CommonInstance {
   public static final String KEY = "Key";
   public static final String VALUE = "Value";
 
   public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
index 9fd62e4..ad83c2b 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
@@ -25,10 +25,12 @@ import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 
+/**
+ * Utils for JStorm runner.
+ */
 public class RunnerUtils {
   /**
-   * Convert WindowedValue<KV<>> into KeyedWorkItem<K, WindowedValue<V>>
-   *
+   * Convert {@link WindowedValue} into {@link KeyedWorkItem}.
    * @param elem
    * @return
    */
@@ -43,11 +45,11 @@ public class RunnerUtils {
   public static boolean isGroupByKeyExecutor(Executor executor) {
     if (executor instanceof GroupByWindowExecutor) {
       return true;
-    } else if (executor instanceof StatefulDoFnExecutor ||
-        executor instanceof MultiStatefulDoFnExecutor) {
+    } else if (executor instanceof StatefulDoFnExecutor
+            || executor instanceof MultiStatefulDoFnExecutor) {
       return true;
     } else {
       return false;
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
index 182794f..479afdc 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
@@ -34,7 +34,7 @@ public class SerializedPipelineOptions implements Serializable {
   private final byte[] serializedOptions;
 
   /**
-   * Lazily initialized copy of deserialized options
+   * Lazily initialized copy of deserialized options.
    */
   private transient PipelineOptions pipelineOptions;
 

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
index cce21b3..46a12b9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.util.WindowedValue;
 
 /**
  * Singleton keyed word item.
- *
  * @param <K>
  * @param <ElemT>
  */
@@ -60,4 +59,4 @@ public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT>
   public Iterable<WindowedValue<ElemT>> elementsIterable() {
     return Collections.singletonList(value);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/5a15d548/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
index 2a8160c..66f33a7 100644
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
@@ -21,10 +21,10 @@ import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 
-import avro.shaded.com.google.common.collect.Maps;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
 import com.alibaba.jstorm.utils.KryoSerializer;
+import com.google.common.collect.Maps;
 import java.util.Iterator;
 import java.util.Map;
 import org.apache.beam.runners.core.StateNamespaces;
@@ -175,11 +175,11 @@ public class JStormStateInternalsTest {
     Iterable<Map.Entry<Integer, Integer>> entries = mapStateA.entries().read();
     Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
     Map.Entry<Integer, Integer> entry = itr.next();
-    assertEquals((long) entry.getKey(), 1l);
-    assertEquals((long) entry.getValue(), 12l);
+    assertEquals((long) entry.getKey(), 1L);
+    assertEquals((long) entry.getValue(), 12L);
     entry = itr.next();
-    assertEquals((long) entry.getKey(), 2l);
-    assertEquals((long) entry.getValue(), 22l);
+    assertEquals((long) entry.getKey(), 2L);
+    assertEquals((long) entry.getValue(), 22L);
     assertEquals(false, itr.hasNext());
 
     mapStateA.remove(1);
@@ -191,8 +191,8 @@ public class JStormStateInternalsTest {
     entries = mapStateA.entries().read();
     itr = entries.iterator();
     entry = itr.next();
-    assertEquals((long) entry.getKey(), 2l);
-    assertEquals((long) entry.getValue(), 22l);
+    assertEquals((long) entry.getKey(), 2L);
+    assertEquals((long) entry.getValue(), 22L);
     assertEquals(false, itr.hasNext());
   }
 


[51/53] [abbrv] beam git commit: jstorm-runner: Add job file to run ValidatesRunner tests of JStorm runner

Posted by pe...@apache.org.
jstorm-runner: Add job file to run ValidatesRunner tests of JStorm runner


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/cb1c3f46
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/cb1c3f46
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/cb1c3f46

Branch: refs/heads/jstorm-runner
Commit: cb1c3f4694a777b2f5a19959561e388bda36a972
Parents: 9e80873
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Aug 16 19:03:38 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:01 2017 +0800

----------------------------------------------------------------------
 ...ostCommit_Java_ValidatesRunner_JStorm.groovy | 43 ++++++++++++++++++++
 1 file changed, 43 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/cb1c3f46/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_JStorm.groovy
----------------------------------------------------------------------
diff --git a/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_JStorm.groovy b/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_JStorm.groovy
new file mode 100644
index 0000000..f7578fa
--- /dev/null
+++ b/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_JStorm.groovy
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import common_job_properties
+
+// This job runs the suite of ValidatesRunner tests against the JStorm runner.
+mavenJob('beam_PostCommit_Java_ValidatesRunner_JStorm') {
+  description('Runs the ValidatesRunner suite on the JStorm runner.')
+  previousNames('beam_PostCommit_Java_RunnableOnService_JStorm')
+
+  // Set common parameters.
+  common_job_properties.setTopLevelMainJobProperties(delegate)
+
+  // Set maven parameters.
+  common_job_properties.setMavenConfig(delegate)
+
+  // Sets that this is a PostCommit job.
+  common_job_properties.setPostCommit(delegate)
+
+  // Allows triggering this build against pull requests.
+  common_job_properties.enablePhraseTriggeringFromPullRequest(
+    delegate,
+    'Apache JStorm Runner ValidatesRunner Tests',
+    'Run JStorm ValidatesRunner')
+
+  // Maven goals for this job.
+  goals('-B -e clean verify -am -pl runners/jstorm -Plocal-validates-runner-tests -Pvalidates-runner-tests')
+}


[45/53] [abbrv] beam git commit: jstorm-runner: Fix duplicated update of metric counter

Posted by pe...@apache.org.
jstorm-runner: Fix duplicated update of metric counter


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/240f61bc
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/240f61bc
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/240f61bc

Branch: refs/heads/jstorm-runner
Commit: 240f61bc6baab9e698b2b6144688853a8371658f
Parents: 61e9fa6
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Wed Aug 9 16:03:50 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 .../org/apache/beam/runners/jstorm/translation/MetricsReporter.java | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/240f61bc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
index 82d8bdc..5b60b03 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MetricsReporter.java
@@ -74,6 +74,7 @@ class MetricsReporter {
         AsmCounter counter = metricClient.registerCounter(metricName);
         Long incValue = (oldValue == null ? updateValue : updateValue - oldValue);
         counter.update(incValue);
+        reportedCounters.put(metricName, incValue);
       }
     }
   }


[52/53] [abbrv] beam git commit: jstorm-runner: The failure of testing "SDK Python" blocked the validation of JStorm runner, so comment out "SDK Python" module temporarily. After the validation of the JStorm runner, this commit shall be reverted.

Posted by pe...@apache.org.
jstorm-runner: The failure of testing "SDK Python" blocked the validation of JStorm runner, so comment out "SDK Python" module temporarily. After the validation of the JStorm runner, this commit shall be reverted.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/87aaa6e2
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/87aaa6e2
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/87aaa6e2

Branch: refs/heads/jstorm-runner
Commit: 87aaa6e259f75d890a08da75ad9a175402e06660
Parents: cb1c3f4
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Thu Aug 17 17:12:28 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:01 2017 +0800

----------------------------------------------------------------------
 sdks/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/87aaa6e2/sdks/pom.xml
----------------------------------------------------------------------
diff --git a/sdks/pom.xml b/sdks/pom.xml
index 27b9610..32c329d 100644
--- a/sdks/pom.xml
+++ b/sdks/pom.xml
@@ -35,7 +35,7 @@
   <modules>
     <module>common</module>
     <module>java</module>
-    <module>python</module>
+    <!--<module>python</module>-->
   </modules>
 
   <profiles>


[04/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
new file mode 100644
index 0000000..1de881f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import backtype.storm.task.TopologyContext;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.google.auto.value.AutoValue;
+
+@AutoValue
+public abstract class ExecutorContext {
+    public static ExecutorContext of(TopologyContext topologyContext, ExecutorsBolt bolt, IKvStoreManager kvStoreManager) {
+        return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
+    }
+
+    public abstract TopologyContext getTopologyContext();
+
+    public abstract ExecutorsBolt getExecutorsBolt();
+
+    public abstract IKvStoreManager getKvStoreManager();
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
new file mode 100644
index 0000000..9df1e17
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
@@ -0,0 +1,327 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import java.io.IOException;
+import java.util.*;
+
+import avro.shaded.com.google.common.base.Joiner;
+import avro.shaded.com.google.common.collect.Sets;
+import backtype.storm.tuple.ITupleExt;
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.KvStoreManagerFactory;
+import com.alibaba.jstorm.cluster.Common;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import com.google.common.base.Function;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Maps;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class ExecutorsBolt extends AdaptorBasicBolt {
+    private static final long serialVersionUID = -7751043327801735211L;
+
+    private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
+
+    protected ExecutorContext executorContext;
+
+    protected TimerService timerService;
+
+    // map from input tag to executor inside bolt
+    protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
+    // set of all output tags that will be emit outside bolt
+    protected final Set<TupleTag> outputTags = Sets.newHashSet();
+    protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
+    protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
+    protected int internalDoFnExecutorId = 1;
+    protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
+
+    protected OutputCollector collector;
+
+    protected boolean isStatefulBolt = false;
+
+    protected KryoSerializer<WindowedValue> serializer;
+
+    public ExecutorsBolt() {
+
+    }
+
+    public void setStatefulBolt(boolean isStateful) {
+        isStatefulBolt = isStateful;
+    }
+
+    public void addExecutor(TupleTag inputTag, Executor executor) {
+        inputTagToExecutor.put(
+                checkNotNull(inputTag, "inputTag"),
+                checkNotNull(executor, "executor"));
+    }
+
+    public Map<TupleTag, Executor> getExecutors() {
+        return inputTagToExecutor;
+    }
+
+    public void registerExecutor(Executor executor) {
+        if (executor instanceof DoFnExecutor) {
+            DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
+            idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
+            doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
+            internalDoFnExecutorId++;
+        }
+    }
+
+    public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
+        return idToDoFnExecutor;
+    }
+
+    public void addOutputTags(TupleTag tag) {
+        outputTags.add(tag);
+    }
+
+    public void addExternalOutputTag(TupleTag<?> tag) {
+        externalOutputTags.add(tag);
+    }
+
+    public Set<TupleTag> getOutputTags() {
+        return outputTags;
+    }
+
+    public ExecutorContext getExecutorContext() {
+        return executorContext;
+    }
+
+    @Override
+    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+        LOG.info("Start to prepare for task-{}", context.getThisTaskId());
+        try {
+            this.collector = collector;
+
+            // init kv store manager
+            String storeName = String.format("task-%d", context.getThisTaskId());
+            String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+            IKvStoreManager kvStoreManager = isStatefulBolt ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, stateStorePath, isStatefulBolt) :
+                    KvStoreManagerFactory.getKvStoreManager(stormConf, storeName, stateStorePath, isStatefulBolt);
+            this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
+
+            // init time service
+            timerService = initTimerService();
+
+            // init all internal executors
+            for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+                executor.init(executorContext);
+                if (executor instanceof DoFnExecutor) {
+                    doFnExecutors.add((DoFnExecutor) executor);
+                }
+            }
+
+            this.serializer = new KryoSerializer<WindowedValue>(stormConf);
+
+            LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
+            LOG.info("inputTagToExecutor={}", inputTagToExecutor);
+            LOG.info("outputTags={}", outputTags);
+            LOG.info("externalOutputTags={}", externalOutputTags);
+            LOG.info("doFnExecutors={}", doFnExecutors);
+        } catch (IOException e) {
+            throw new RuntimeException("Failed to prepare executors bolt", e);
+        }
+    }
+
+    public TimerService initTimerService() {
+        TopologyContext context = executorContext.getTopologyContext();
+        List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
+                .transformAndConcat(
+                        new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
+                            @Override
+                            public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
+                                if (Common.isSystemComponent(value.getKey())) {
+                                    return Collections.EMPTY_LIST;
+                                } else {
+                                    return value.getValue();
+                                }
+                            }
+                        })
+                .toList();
+        TimerService ret = new TimerServiceImpl(executorContext);
+        ret.init(tasks);
+        return ret;
+    }
+
+    @Override
+    public void execute(Tuple input) {
+        // process a batch
+        String streamId = input.getSourceStreamId();
+        ITupleExt tuple = (ITupleExt) input;
+        Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
+        if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
+            while (valueIterator.hasNext()) {
+                processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
+            }
+        } else {
+            doFnStartBundle();
+            while (valueIterator.hasNext()) {
+                processElement(valueIterator.next(), streamId);
+            }
+            doFnFinishBundle();
+        }
+    }
+
+    private void processWatermark(long watermarkTs, int sourceTask) {
+        long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
+        LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
+                (new Instant(watermarkTs)).toDateTime(), sourceTask, (new Instant(newWaterMark)).toDateTime());
+        if (newWaterMark != 0) {
+            // Some buffer windows are going to be triggered.
+            doFnStartBundle();
+            timerService.fireTimers(newWaterMark);
+
+            // SideInput: If a watermark with the max timestamp is received, it means no more data is
+            // supposed to be received from now on. So we are going to process all pushed-back data.
+            if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
+                for (DoFnExecutor doFnExecutor : doFnExecutors) {
+                    doFnExecutor.processAllPushBackElements();
+                }
+            }
+
+            doFnFinishBundle();
+        }
+
+        long currentWaterMark = timerService.currentOutputWatermark();
+        if (!externalOutputTags.isEmpty()) {
+            collector.flush();
+            collector.emit(
+                    CommonInstance.BEAM_WATERMARK_STREAM_ID,
+                    new Values(currentWaterMark));
+            LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
+        }
+    }
+
+    private void processElement(List<Object> values, String streamId) {
+        TupleTag inputTag = new TupleTag(streamId);
+        WindowedValue windowedValue = retrieveWindowedValueFromTupleValue(values);
+        processExecutorElem(inputTag, windowedValue);
+    }
+
+    public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
+        LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
+        if (elem != null) {
+            Executor executor = inputTagToExecutor.get(inputTag);
+            if (executor != null) {
+                executor.process(inputTag, elem);
+            }
+            if (externalOutputTags.contains(inputTag)) {
+                emitOutsideBolt(inputTag, elem);
+            }
+        } else {
+            LOG.info("Received null elem for tag={}", inputTag);
+        }
+    }
+
+    @Override
+    public void cleanup() {
+        for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+            executor.cleanup();
+        }
+        executorContext.getKvStoreManager().close();
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+    public TimerService timerService() {
+        return timerService;
+    }
+
+    public void setTimerService(TimerService service) {
+        timerService = service;
+    }
+
+    private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
+        WindowedValue wv = null;
+        if (values.size() > 1) {
+            Object key = values.get(0);
+            WindowedValue value = serializer.deserialize((byte[]) values.get(1));
+            wv = value.withValue(KV.of(key, value.getValue()));
+        } else {
+            wv = serializer.deserialize((byte[])values.get(0));
+        }
+        return wv;
+    }
+
+    protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
+        LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
+        if (keyedEmit(outputTag.getId())) {
+            KV kv = (KV) outputValue.getValue();
+            byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
+            // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+            if (kv.getKey() == null) {
+                // If the key is null, emit the "null" string here, because null values are ignored in JStorm.
+                collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
+            } else {
+                collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
+            }
+        } else {
+            byte[] immutableOutputValue = serializer.serialize(outputValue);
+            collector.emit(outputTag.getId(), new Values(immutableOutputValue));
+        }
+    }
+
+    private void doFnStartBundle() {
+        for (DoFnExecutor doFnExecutor : doFnExecutors) {
+            doFnExecutor.startBundle();
+        }
+    }
+
+    private void doFnFinishBundle() {
+        for (DoFnExecutor doFnExecutor : doFnExecutors) {
+            doFnExecutor.finishBundle();
+        }
+    }
+
+    @Override
+    public String toString() {
+        // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
+        List<String> ret = new ArrayList<>();
+        /*ret.add("inputTags");
+        for (TupleTag inputTag : inputTagToExecutor.keySet()) {
+            ret.add(inputTag.getId());
+        }*/
+        ret.add("internalExecutors");
+        for (Executor executor : inputTagToExecutor.values()) {
+            ret.add(executor.toString());
+        }
+        ret.add("externalOutputTags");
+        for (TupleTag output : externalOutputTags) {
+            ret.add(output.getId());
+        }
+        return Joiner.on('\n').join(ret).concat("\n");
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
new file mode 100644
index 0000000..1ef28c9
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class FlattenExecutor<InputT> implements Executor {
+
+    private final String description;
+    private TupleTag mainOutputTag;
+    private ExecutorContext context;
+    private ExecutorsBolt executorsBolt;
+
+    public FlattenExecutor(String description, TupleTag mainTupleTag) {
+        this.description = checkNotNull(description, "description");
+        this.mainOutputTag = mainTupleTag;
+    }
+
+    @Override
+    public void init(ExecutorContext context) {
+        this.context = context;
+        this.executorsBolt = context.getExecutorsBolt();
+    }
+
+    @Override
+    public void process(TupleTag tag, WindowedValue elem) {
+        executorsBolt.processExecutorElem(mainOutputTag, elem);
+    }
+
+    @Override
+    public void cleanup() {}
+
+    @Override
+    public String toString() {
+        return description;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
new file mode 100644
index 0000000..419a4a0
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+import com.google.common.collect.ImmutableList;
+import org.apache.beam.runners.core.DoFnRunner;
+import org.apache.beam.runners.core.DoFnRunners;
+import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetNewDoFn;
+import org.apache.beam.runners.core.KeyedWorkItem;
+import org.apache.beam.runners.core.KeyedWorkItems;
+import org.apache.beam.runners.core.StateInternals;
+import org.apache.beam.runners.core.StateInternalsFactory;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.core.SystemReduceFn;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.runners.core.TimerInternalsFactory;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.runners.core.NullSideInputReader;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.TupleTag;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.TranslationContext.UserGraphContext;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+public class GroupByWindowExecutor<K, V> extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
+    private static final long serialVersionUID = -7563050475488610553L;
+
+    private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
+
+    private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
+
+        @Override
+        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+            executorsBolt.processExecutorElem(tag, output);
+        }
+    }
+
+    private KvCoder<K, V> inputKvCoder;
+    private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
+
+    public GroupByWindowExecutor(
+            String stepName,
+            String description,
+            TranslationContext context,
+            StormPipelineOptions pipelineOptions,
+            WindowingStrategy<?, ?> windowingStrategy,
+            TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+        // The doFn will be created at runtime. Just pass "null" here.
+        super(stepName, description, pipelineOptions, null, null, windowingStrategy, null, null, null, mainTupleTag, sideOutputTags);
+
+        this.outputManager = new GroupByWindowOutputManager();
+        UserGraphContext userGraphContext = context.getUserGraphContext();
+        PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+        this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
+    }
+
+    private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
+        final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
+            @Override
+            public StateInternals stateInternalsForKey(K key) {
+                return new JStormStateInternals<K>(key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+            }
+        };
+        TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
+            @Override
+            public TimerInternals timerInternalsForKey(K key) {
+                return new JStormTimerInternals<>(key, GroupByWindowExecutor.this, executorContext.getExecutorsBolt().timerService());
+            }
+        };
+
+        reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
+        DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
+              GroupAlsoByWindowViaWindowSetNewDoFn.create(
+                  windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
+                      (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
+        return doFn;
+    }
+
+    @Override
+    protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
+        doFn = getGroupByWindowDoFn();
+
+        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.<KeyedWorkItem<K, V>, KV<K, Iterable<V>>>simpleRunner(
+                this.pipelineOptions,
+                this.doFn,
+                NullSideInputReader.empty(),
+                this.outputManager,
+                this.mainTupleTag,
+                this.sideOutputTags,
+                this.stepContext,
+                this.windowingStrategy);
+
+        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner = DoFnRunners.lateDataDroppingRunner(
+                simpleRunner,
+                this.stepContext,
+                this.windowingStrategy);
+        return new DoFnRunnerWithMetrics<>(
+            stepName, doFnRunner, MetricsReporter.create(metricClient));
+    }
+
+    @Override
+    public void process(TupleTag tag, WindowedValue elem) {
+        /**
+         *  For GroupByKey, KV type elem is received. We need to convert the KV elem
+         *  into KeyedWorkItem first, which is the expected type in LateDataDroppingDoFnRunner.
+         */
+        KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
+        runner.processElement(elem.withValue(keyedWorkItem));
+    }
+
+    @Override
+    public void onTimer(Object key, TimerInternals.TimerData timerData) {
+        StateNamespace namespace = timerData.getNamespace();
+        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+
+        runner.processElement(
+                WindowedValue.valueInGlobalWindow(
+                        KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
+    }
+
+    @Override
+    public String toString() {
+        return super.toString();
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
new file mode 100644
index 0000000..a022440
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.beam.runners.core.metrics.MetricsContainerStepMap.asAttemptedOnlyMetricResults;
+
+import com.alibaba.jstorm.common.metric.AsmCounter;
+import com.alibaba.jstorm.metric.MetricClient;
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
+import org.apache.beam.sdk.metrics.MetricQueryResults;
+import org.apache.beam.sdk.metrics.MetricResult;
+import org.apache.beam.sdk.metrics.MetricResults;
+import org.apache.beam.sdk.metrics.MetricsContainer;
+import org.apache.beam.sdk.metrics.MetricsFilter;
+
+/**
+ * Class that holds a {@link MetricsContainerStepMap}, and reports metrics to JStorm engine.
+ */
+public class MetricsReporter {
+
+  private static final String METRIC_KEY_SEPARATOR = "__";
+  private static final String COUNTER_PREFIX = "__counter";
+
+  private final MetricsContainerStepMap metricsContainers = new MetricsContainerStepMap();
+  private final Map<String, Long> reportedCounters = Maps.newHashMap();
+  private final MetricClient metricClient;
+
+  public static MetricsReporter create(MetricClient metricClient) {
+    return new MetricsReporter(metricClient);
+  }
+
+  private MetricsReporter(MetricClient metricClient) {
+    this.metricClient = checkNotNull(metricClient, "metricClient");
+  }
+
+  public MetricsContainer getMetricsContainer(String stepName) {
+    return metricsContainers.getContainer(stepName);
+  }
+
+  public void updateMetrics() {
+    MetricResults metricResults = asAttemptedOnlyMetricResults(metricsContainers);
+    MetricQueryResults metricQueryResults =
+        metricResults.queryMetrics(MetricsFilter.builder().build());
+    updateCounters(metricQueryResults.counters());
+  }
+
+  private void updateCounters(Iterable<MetricResult<Long>> counters) {
+    System.out.print("updateCounters");
+    for (MetricResult<Long> metricResult : counters) {
+      String metricName = getMetricNameString(COUNTER_PREFIX, metricResult);
+      System.out.print("metricName: " + metricName);
+      Long updateValue = metricResult.attempted();
+      Long oldValue = reportedCounters.get(metricName);
+
+      if (oldValue == null || oldValue < updateValue) {
+        AsmCounter counter = metricClient.registerCounter(metricName);
+        Long incValue = (oldValue == null ? updateValue : updateValue - oldValue);
+        counter.update(incValue);
+      }
+    }
+  }
+
+  private String getMetricNameString(String prefix, MetricResult<?> metricResult) {
+    return prefix
+        + METRIC_KEY_SEPARATOR + metricResult.step()
+        + METRIC_KEY_SEPARATOR + metricResult.name().namespace()
+        + METRIC_KEY_SEPARATOR + metricResult.name().name();
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
new file mode 100644
index 0000000..28dc234
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
+    private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
+
+    /**
+     * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated tag
+     * is used in downstream consumer. So before output, we need to map this "local" tag to "external"
+     * tag. See PCollectionTuple for details.
+     */
+    public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
+        @Override
+        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+            if (localTupleTagMap.containsKey(tag)) {
+                executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
+            } else {
+                executorsBolt.processExecutorElem(tag, output);
+            }
+        }
+    }
+
+    protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
+
+    public MultiOutputDoFnExecutor(
+            String stepName,
+            String description,
+            StormPipelineOptions pipelineOptions,
+            DoFn<InputT, OutputT> doFn,
+            Coder<WindowedValue<InputT>> inputCoder,
+            WindowingStrategy<?, ?> windowingStrategy,
+            TupleTag<InputT> mainInputTag,
+            Collection<PCollectionView<?>> sideInputs,
+            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+            TupleTag<OutputT> mainTupleTag,
+            List<TupleTag<?>> sideOutputTags,
+            Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
+            ) {
+        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
+                sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+        this.localTupleTagMap = localTupleTagMap;
+        this.outputManager = new MultiOutputDoFnExecutorOutputManager();
+        LOG.info("localTupleTagMap: {}", localTupleTagMap);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
new file mode 100644
index 0000000..a58a818
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * {@link MultiOutputDoFnExecutor} for stateful {@link DoFn}s: input elements are keyed
+ * {@link KV}s, and per-key state and timer internals are installed on the step context
+ * before the element (or timer) is dispatched.
+ */
+public class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
+
+    public MultiStatefulDoFnExecutor(
+        String stepName, String description,
+        StormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
+        Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
+        TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
+        Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
+        List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
+        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
+    }
+
+    @Override
+    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+        if (mainInputTag.equals(tag)) {
+            // Main input: bind timer and state internals to this element's key before
+            // processing, so state/timer accesses in the DoFn are scoped per key.
+            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
+                    executorContext.getExecutorsBolt().timerService()));
+            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
+                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+            processMainInput(elem);
+        } else {
+            // Side inputs are not keyed; no per-key internals are needed.
+            processSideInput(tag, elem);
+        }
+    }
+
+    @Override
+    public void onTimer(Object key, TimerInternals.TimerData timerData) {
+        // Re-bind state internals to the firing timer's key before delegating.
+        stepContext.setStateInternals(new JStormStateInternals<>(key,
+                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+        super.onTimer(key, timerData);
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
new file mode 100644
index 0000000..269f03c
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.WindowingStrategy;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * {@link DoFnExecutor} for stateful {@link DoFn}s with a single output: input elements are
+ * keyed {@link KV}s, and per-key state and timer internals are installed on the step
+ * context before the element (or timer) is dispatched.
+ */
+public class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
+    public StatefulDoFnExecutor(
+            String stepName, String description, StormPipelineOptions pipelineOptions,
+            DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
+            WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
+            Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
+                    sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
+                mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+    }
+
+    @Override
+    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+        if (mainInputTag.equals(tag)) {
+            // Main input: bind timer and state internals to this element's key before
+            // processing, so state/timer accesses in the DoFn are scoped per key.
+            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
+                    executorContext.getExecutorsBolt().timerService()));
+            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
+                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+            processMainInput(elem);
+        } else {
+            // Side inputs are not keyed; no per-key internals are needed.
+            processSideInput(tag, elem);
+        }
+    }
+
+    @Override
+    public void onTimer(Object key, TimerInternals.TimerData timerData) {
+        // Re-bind state internals to the firing timer's key before delegating.
+        stepContext.setStateInternals(new JStormStateInternals<>(key,
+                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+        super.onTimer(key, timerData);
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
new file mode 100644
index 0000000..47db018
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.runners.core.TimerInternals;
+import org.joda.time.Instant;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Interface that tracks input watermarks and manages timers in each bolt.
+ */
+public interface TimerService extends Serializable {
+
+    /**
+     * Registers the upstream tasks whose watermarks feed this service; every task starts
+     * at {@code BoundedWindow.TIMESTAMP_MIN_VALUE}. Must be called before any update.
+     *
+     * @param upStreamTasks task ids of all upstream tasks
+     */
+    void init(List<Integer> upStreamTasks);
+
+    /**
+     * Records a new input watermark reported by one upstream task.
+     *
+     * @param task id of the upstream task reporting the watermark
+     * @param inputWatermark the task's new input watermark, in millis since the epoch
+     * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
+     */
+    long updateInputWatermark(Integer task, long inputWatermark);
+
+    /** Returns the current input watermark: the minimum over all upstream task watermarks. */
+    long currentInputWatermark();
+
+    /** Returns the current output watermark: the input watermark capped by any watermark hold. */
+    long currentOutputWatermark();
+
+    /** Removes the watermark hold previously registered for the given namespace, if any. */
+    void clearWatermarkHold(String namespace);
+
+    /** Registers (or tightens) a watermark hold for the given namespace. */
+    void addWatermarkHold(String namespace, Instant watermarkHold);
+
+    /** Registers an event-time timer for the given key, to be delivered to the executor. */
+    void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
+
+    /** Fires all pending event-time timers whose timestamp is at or before the new watermark. */
+    void fireTimers(long newWatermark);
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
new file mode 100644
index 0000000..3b864d5
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import avro.shaded.com.google.common.collect.Maps;
+import avro.shaded.com.google.common.collect.Sets;
+import com.alibaba.jstorm.utils.Pair;
+import org.apache.beam.runners.core.TimerInternals;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.state.TimeDomain;
+import org.joda.time.Instant;
+
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+/**
+ * Default implementation of {@link TimerService}.
+ */
+public class TimerServiceImpl implements TimerService {
+    // Rebuilt from the constructor's ExecutorContext; intentionally not serialized.
+    // NOTE(review): after Java deserialization these remain null and fireTimers() would
+    // NPE on idToDoFnExecutor — confirm how restored instances are re-wired.
+    private transient ExecutorContext executorContext;
+    private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
+
+    // Latest watermark reported per upstream task; the local input watermark is the
+    // minimum over all of them, i.e. the head of the inputWatermarks priority queue.
+    private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark = new ConcurrentHashMap<>();
+    private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
+    // Active watermark holds, smallest (earliest) first; backed by namespaceToWatermarkHold.
+    private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
+    private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
+    // Pending event-time timers ordered by timestamp.
+    // NOTE(review): transient final, initialized only at declaration — after Java
+    // deserialization this field is null, so setTimer/fireTimers would NPE on a restored
+    // instance (TimerService is Serializable and is checkpointed by TxExecutorsBolt);
+    // verify restore path or re-create the queue on deserialization.
+    private transient final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue = new PriorityQueue<>();
+    // For each pending timer, the (executor id, key) pairs it should fire for.
+    private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
+            timerDataToKeyedExecutors = Maps.newHashMap();
+
+    // Set by init(); guards against watermark updates before registration of upstream tasks.
+    private boolean initialized = false;
+
+    public TimerServiceImpl() {
+    }
+
+    public TimerServiceImpl(ExecutorContext executorContext) {
+        this.executorContext = executorContext;
+        this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
+    }
+
+    @Override
+    public void init(List<Integer> upStreamTasks) {
+        // Every upstream task starts at the minimum watermark until it reports progress.
+        for (Integer task : upStreamTasks) {
+            upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
+            inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
+        }
+        initialized = true;
+    }
+
+    @Override
+    public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
+        checkState(initialized, "TimerService has not been initialized.");
+        Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
+        // Make sure the input watermark doesn't go backward.
+        if (taskInputWatermark > oldTaskInputWatermark) {
+            upStreamTaskToInputWatermark.put(task, taskInputWatermark);
+            inputWatermarks.add(taskInputWatermark);
+            inputWatermarks.remove(oldTaskInputWatermark);
+
+            // The local watermark (minimum over tasks) can only advance if the updated
+            // task previously held the minimum; report the new value in that case.
+            long newLocalInputWatermark = currentInputWatermark();
+            if (newLocalInputWatermark > oldTaskInputWatermark) {
+                return newLocalInputWatermark;
+            }
+        }
+        // 0 is the sentinel for "local input watermark did not advance".
+        return 0;
+    }
+
+    @Override
+    public void fireTimers(long newWatermark) {
+        // Drain all timers due at or before the new watermark, in timestamp order,
+        // dispatching each to the executor/key pairs registered via setTimer().
+        TimerInternals.TimerData timerData;
+        while ((timerData = eventTimeTimersQueue.peek()) != null
+                && timerData.getTimestamp().getMillis() <= newWatermark) {
+            for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
+                DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
+                executor.onTimer(keyedExecutor.getSecond(), timerData);
+            }
+            eventTimeTimersQueue.remove();
+            timerDataToKeyedExecutors.remove(timerData);
+        }
+    }
+
+    @Override
+    public long currentInputWatermark() {
+        return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
+    }
+
+    @Override
+    public long currentOutputWatermark() {
+        // The output watermark is the input watermark capped by the earliest active hold.
+        if (watermarkHolds.isEmpty()) {
+            return currentInputWatermark();
+        } else {
+            return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
+        }
+    }
+
+    @Override
+    public void clearWatermarkHold(String namespace) {
+        Instant currentHold = namespaceToWatermarkHold.get(namespace);
+        if (currentHold != null) {
+            watermarkHolds.remove(currentHold);
+            namespaceToWatermarkHold.remove(namespace);
+        }
+    }
+
+    @Override
+    public void addWatermarkHold(String namespace, Instant watermarkHold) {
+        Instant currentHold = namespaceToWatermarkHold.get(namespace);
+        if (currentHold == null) {
+            namespaceToWatermarkHold.put(namespace, watermarkHold);
+            watermarkHolds.add(watermarkHold);
+        } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
+            // Only an earlier hold replaces an existing one; a later hold is ignored.
+            namespaceToWatermarkHold.put(namespace, watermarkHold);
+            watermarkHolds.add(watermarkHold);
+            watermarkHolds.remove(currentHold);
+        }
+    }
+
+    @Override
+    public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
+        // Only event-time timers are supported by this implementation.
+        checkArgument(
+                TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
+                String.format("Does not support domain: %s.", timerData.getDomain()));
+        Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
+        if (keyedExecutors == null) {
+            // First registration of this timer: enqueue it for firing.
+            keyedExecutors = Sets.newHashSet();
+            eventTimeTimersQueue.add(timerData);
+        }
+        keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
+        timerDataToKeyedExecutors.put(timerData, keyedExecutors);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
new file mode 100644
index 0000000..0fb88ab
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.transactional.bolt.ITransactionStatefulBoltExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
+    private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
+
+    private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
+    private static final String TIMER_SERVICE_KET = "timer_service_key";
+
+    private ExecutorsBolt executorsBolt;
+    private IKvStoreManager kvStoreManager;
+    private IKvStore<String, TimerService> timerServiceStore;
+
+    public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
+        this.executorsBolt = executorsBolt;
+        this.executorsBolt.setStatefulBolt(true);
+    }
+
+    @Override
+    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+        try {
+            executorsBolt.prepare(stormConf, context, collector);
+            kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
+            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+        } catch (IOException e) {
+            LOG.error("Failed to prepare stateful bolt", e);
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    @Override
+    public void execute(Tuple input) {
+        executorsBolt.execute(input);
+    }
+
+    @Override
+    public void cleanup() {
+        executorsBolt.cleanup();
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        executorsBolt.declareOutputFields(declarer);
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return executorsBolt.getComponentConfiguration();
+    }
+
+    @Override
+    public void initState(Object userState) {
+        LOG.info("Begin to init from state: {}", userState);
+        restore(userState);
+    }
+
+    @Override
+    public Object finishBatch(long batchId) {
+        try {
+            timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
+        } catch (IOException e) {
+            LOG.error("Failed to store current timer service status", e);
+            throw new RuntimeException(e.getMessage());
+        }
+        kvStoreManager.checkpoint(batchId);
+        return null;
+    }
+
+    @Override
+    public Object commit(long batchId, Object state) {
+        return kvStoreManager.backup(batchId);
+    }
+
+    @Override
+    public void rollBack(Object userState) {
+        LOG.info("Begin to rollback from state: {}", userState);
+        restore(userState);
+    }
+
+    @Override
+    public void ackCommit(long batchId, long timeStamp) {
+        kvStoreManager.remove(batchId);
+    }
+
+    private void restore(Object userState) {
+        try {
+            // restore all states
+            kvStoreManager.restore(userState);
+
+            // init timer service
+            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+            TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
+            if (timerService == null) {
+                timerService = executorsBolt.initTimerService();
+            }
+            executorsBolt.setTimerService(timerService);
+        } catch (IOException e) {
+            LOG.error("Failed to restore state", e);
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
new file mode 100644
index 0000000..22dd07b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.KvStoreManagerFactory;
+import com.alibaba.jstorm.transactional.spout.ITransactionSpoutExecutor;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
+    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
+
+    private static final String SOURCE_STORE_ID = "SourceCheckpoint";
+    private static final String CHECKPOINT_MARK = "CheckpointMark";
+
+    private UnboundedSourceSpout sourceSpout;
+    private UnboundedSource.UnboundedReader reader;
+    private IKvStoreManager kvStoreManager;
+    private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
+
+    public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
+        this.sourceSpout = sourceSpout;
+    }
+
+    private void restore(Object userState) {
+        try {
+            kvStoreManager.restore(userState);
+            sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
+            UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
+            sourceSpout.createSourceReader(checkpointMark);
+            reader = sourceSpout.getUnboundedSourceReader();
+        } catch (IOException e) {
+            LOG.error("Failed to init state", e);
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    @Override
+    public void initState(Object userState) {
+        restore(userState);
+    }
+
+    @Override
+    public Object finishBatch(long checkpointId) {
+        try {
+            // Store check point mark from unbounded source reader
+            UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
+            sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
+
+            // checkpoint all kv stores in current manager
+            kvStoreManager.checkpoint(checkpointId);
+        } catch (IOException e) {
+            LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
+            throw new RuntimeException(e.getMessage());
+        }
+        return null;
+    }
+
+    @Override
+    public Object commit(long batchId, Object state) {
+        // backup kv stores to remote state backend
+        return kvStoreManager.backup(batchId);
+    }
+
+    @Override
+    public void rollBack(Object userState) {
+        restore(userState);
+    }
+
+    @Override
+    public void ackCommit(long batchId, long timeStamp) {
+        // remove obsolete state in bolt local and remote state backend
+        kvStoreManager.remove(batchId);
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+        sourceSpout.declareOutputFields(declarer);
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return sourceSpout.getComponentConfiguration();
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        try {
+            sourceSpout.open(conf, context, collector);
+            String storeName = String.format("task-%s", context.getThisTaskId());
+            String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+            kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, storePath, true);
+
+            reader = sourceSpout.getUnboundedSourceReader();
+        } catch (IOException e) {
+            LOG.error("Failed to open transactional unbounded source spout", e);
+            throw new RuntimeException(e.getMessage());
+        }
+    }
+
+    @Override
+    public void close() {
+        sourceSpout.close();
+    }
+
+    @Override
+    public void activate() {
+        sourceSpout.activate();
+    }
+
+    @Override
+    public void deactivate() {
+        sourceSpout.deactivate();
+    }
+
+    @Override
+    public void nextTuple() {
+        sourceSpout.nextTuple();
+    }
+
+    @Override
+    public void ack(Object msgId) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void fail(Object msgId) {
+        throw new UnsupportedOperationException();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
new file mode 100644
index 0000000..973f703
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.TupleTag;
+
+import org.apache.beam.runners.jstorm.StormPipelineOptions;
+import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.tuple.Values;
+
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Spout implementation that wraps a Beam UnboundedSource
+ *
+ * TODO: add wrapper to support metrics in UnboundedSource.
+ */
+public class UnboundedSourceSpout extends AdaptorBasicSpout {
+    private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
+
+    private final String description;
+    private final UnboundedSource source;
+    private final SerializedPipelineOptions serializedOptions;
+    private final TupleTag<?> outputTag;
+
+    private transient StormPipelineOptions pipelineOptions;
+    private transient UnboundedSource.UnboundedReader reader;
+    private transient SpoutOutputCollector collector;
+
+    private volatile boolean hasNextRecord;
+    private AtomicBoolean activated = new AtomicBoolean();
+
+    private KryoSerializer<WindowedValue> serializer;
+
+    // Highest watermark emitted so far; a watermark tuple is only emitted when it advances.
+    private long lastWaterMark = 0L;
+
+    public UnboundedSourceSpout(
+            String description,
+            UnboundedSource source,
+            StormPipelineOptions options,
+            TupleTag<?> outputTag) {
+        this.description = checkNotNull(description, "description");
+        this.source = checkNotNull(source, "source");
+        this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
+        this.outputTag = checkNotNull(outputTag, "outputTag");
+    }
+
+    @Override
+    public synchronized void close() {
+        activated.set(false);
+        try {
+            // Guard against close() before open() (or after a failed open), where the
+            // reader was never created.
+            if (reader != null) {
+                reader.close();
+            }
+        } catch (IOException e) {
+            // Log instead of printStackTrace() so the failure shows up in worker logs.
+            LOG.error("Failed to close reader of source spout: " + description, e);
+        }
+    }
+
+    @Override
+    public void activate() {
+        activated.set(true);
+    }
+
+    @Override
+    public void deactivate() {
+        activated.set(false);
+    }
+
+    @Override
+    public void ack(Object msgId) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void fail(Object msgId) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public Map<String, Object> getComponentConfiguration() {
+        return null;
+    }
+
+    @Override
+    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+        try {
+            this.collector = collector;
+            this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(StormPipelineOptions.class);
+
+            createSourceReader(null);
+
+            this.serializer = new KryoSerializer<>(conf);
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to create unbounded reader.", e);
+        }
+    }
+
+    /**
+     * (Re)creates the underlying reader, optionally resuming from a checkpoint.
+     * Any previously opened reader is closed first.
+     */
+    public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
+        if (reader != null) {
+            reader.close();
+        }
+        reader = this.source.createReader(this.pipelineOptions, checkpointMark);
+        hasNextRecord = this.reader.start();
+    }
+
+    @Override
+    public synchronized void nextTuple() {
+        if (!activated.get()) {
+            return;
+        }
+        try {
+            if (!hasNextRecord) {
+                hasNextRecord = reader.advance();
+            }
+
+            // Drain all currently available records; stop early if the spout is deactivated.
+            while (hasNextRecord && activated.get()) {
+                Object value = reader.getCurrent();
+                Instant timestamp = reader.getCurrentTimestamp();
+
+                WindowedValue wv = WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
+                LOG.debug("Source output: {}", wv.getValue());
+                if (keyedEmit(outputTag.getId())) {
+                    KV kv = (KV) wv.getValue();
+                    // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+                    byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
+                    collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
+                } else {
+                    byte[] immutableValue = serializer.serialize(wv);
+                    collector.emit(outputTag.getId(), new Values(immutableValue));
+                }
+
+                // move to next record
+                hasNextRecord = reader.advance();
+            }
+
+            // Emit a watermark tuple only when the source watermark has advanced.
+            Instant waterMark = reader.getWatermark();
+            if (waterMark != null && lastWaterMark < waterMark.getMillis()) {
+                lastWaterMark = waterMark.getMillis();
+                collector.flush();
+                collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
+                LOG.debug("Source output: WM-{}", waterMark.toDateTime());
+            }
+        } catch (IOException e) {
+            throw new RuntimeException("Exception reading values from source.", e);
+        }
+    }
+
+    public UnboundedSource getUnboundedSource() {
+        return source;
+    }
+
+    public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
+        return reader;
+    }
+
+    @Override
+    public String toString() {
+        return description;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
new file mode 100644
index 0000000..7b0e8db
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * JStorm {@link Executor} for {@link View}.
+ */
+public class ViewExecutor implements Executor {
+
+    // Human-readable name; used only by toString() for logging/debugging.
+    private final String description;
+    // Tag under which every incoming element is re-emitted downstream.
+    private final TupleTag outputTag;
+    private ExecutorsBolt executorsBolt;
+
+    public ViewExecutor(String description, TupleTag outputTag) {
+        this.description = description;
+        this.outputTag = outputTag;
+    }
+
+    @Override
+    public void init(ExecutorContext context) {
+        this.executorsBolt = context.getExecutorsBolt();
+    }
+
+    // Pass-through: forwards each element unchanged to the bolt under outputTag,
+    // ignoring the incoming tag.
+    @Override
+    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+        executorsBolt.processExecutorElem(outputTag, elem);
+    }
+
+    @Override
+    public void cleanup() {}
+
+    @Override
+    public String toString() {
+        return description;
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
new file mode 100644
index 0000000..a6c3c16
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime;
+
+import com.google.common.collect.Iterables;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.WindowFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+/**
+ * {@link Executor} that assigns each input element to windows via the configured
+ * {@link WindowFn} and forwards one output element per assigned window.
+ */
+public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
+    private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
+
+    private final String description;
+    private WindowFn<T, W> windowFn;
+    private ExecutorsBolt executorsBolt;
+    private TupleTag outputTag;
+
+    // Adapts a WindowedValue to the AssignContext API expected by WindowFn.
+    // Requires the input value to be in exactly one window.
+    class JStormAssignContext<InputT, W extends BoundedWindow>
+            extends WindowFn<InputT, W>.AssignContext {
+        private final WindowedValue<InputT> value;
+
+        JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
+            fn.super();
+            checkArgument(
+                    Iterables.size(value.getWindows()) == 1,
+                    String.format(
+                            "%s passed to window assignment must be in a single window, but it was in %s: %s",
+                            WindowedValue.class.getSimpleName(),
+                            Iterables.size(value.getWindows()),
+                            value.getWindows()));
+            this.value = value;
+        }
+
+        @Override
+        public InputT element() {
+            return value.getValue();
+        }
+
+        @Override
+        public Instant timestamp() {
+            return value.getTimestamp();
+        }
+
+        @Override
+        public BoundedWindow window() {
+            return Iterables.getOnlyElement(value.getWindows());
+        }
+    }
+
+    public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
+        this.description = description;
+        this.windowFn = windowFn;
+        this.outputTag = outputTag;
+    }
+
+    @Override
+    public void init(ExecutorContext context) {
+        this.executorsBolt = context.getExecutorsBolt();
+    }
+
+    @Override
+    public void process(TupleTag tag, WindowedValue elem) {
+        Collection<W> windows = null;
+        try {
+            windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
+            // Emit one copy of the element per window it was assigned to.
+            for (W window: windows) {
+                executorsBolt.processExecutorElem(
+                        outputTag,
+                        WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
+            }
+        } catch (Exception e) {
+            // NOTE(review): a failed assignment silently drops the element; presumably
+            // intentional best-effort behavior — confirm it should not fail the bolt.
+            LOG.warn("Failed to assign windows for elem=" + elem, e);
+        }
+    }
+
+    @Override
+    public void cleanup() {}
+
+
+    @Override
+    public String toString() {
+        return description;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
new file mode 100644
index 0000000..eaf0549
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import com.alibaba.jstorm.cache.ComposedKey;
+import com.alibaba.jstorm.cache.IKvStore;
+import com.alibaba.jstorm.cache.KvStoreIterable;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * JStorm implementation of {@link BagState}.
+ */
+class JStormBagState<K, T> implements BagState<T> {
+    private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
+
+    @Nullable
+    private final K key;
+    private final StateNamespace namespace;
+    // Element store: one entry per element, keyed by (key, namespace, index).
+    private final IKvStore<ComposedKey, T> kvState;
+    // Bookkeeping store: holds the last written element index for (key, namespace).
+    private final IKvStore<ComposedKey, Object> stateInfoKvState;
+    // Next index to write; equals the number of elements currently in the bag.
+    private int elemIndex;
+
+    public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
+                           IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
+        this.key = key;
+        this.namespace = checkNotNull(namespace, "namespace");
+        this.kvState = checkNotNull(kvState, "kvState");
+        this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
+
+        // Restore the element count: the stored value is the last used index, so the
+        // next free slot is index + 1; an absent value means the bag is empty.
+        Integer index = (Integer) stateInfoKvState.get(getComposedKey());
+        this.elemIndex = index != null ? ++index : 0;
+    }
+
+    @Override
+    public void add(T input) {
+        try {
+            kvState.put(getComposedKey(elemIndex), input);
+            stateInfoKvState.put(getComposedKey(), elemIndex);
+            elemIndex++;
+        } catch (IOException e) {
+            // Wrap the exception itself rather than e.getCause(), which may be null
+            // and would discard the original failure and stack trace.
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+        return new ReadableState<Boolean>() {
+            @Override
+            public Boolean read() {
+                return elemIndex <= 0;
+            }
+
+            @Override
+            public ReadableState<Boolean> readLater() {
+                // TODO: support prefetch.
+                return this;
+            }
+        };
+    }
+
+    @Override
+    public Iterable<T> read() {
+        return new BagStateIterable(elemIndex);
+    }
+
+    @Override
+    public BagState readLater() {
+        // TODO: support prefetch.
+        return this;
+    }
+
+    @Override
+    public void clear() {
+        try {
+            for (int i = 0; i < elemIndex; i++) {
+                kvState.remove(getComposedKey(i));
+            }
+            stateInfoKvState.remove(getComposedKey());
+            elemIndex = 0;
+        } catch (IOException e) {
+            // Preserve the original exception as the cause (e.getCause() may be null).
+            throw new RuntimeException(e);
+        }
+    }
+
+    // Key for the bookkeeping entry of this (key, namespace) pair.
+    private ComposedKey getComposedKey() {
+        return ComposedKey.of(key, namespace);
+    }
+
+    // Key for the element stored at the given index.
+    private ComposedKey getComposedKey(int elemIndex) {
+        return ComposedKey.of(key, namespace, elemIndex);
+    }
+
+    /**
+     * Lazy view over the bag's elements, read one by one from the KV store.
+     */
+    private class BagStateIterable implements KvStoreIterable<T> {
+
+        private class BagStateIterator implements Iterator<T> {
+            private final int size;
+            private int cursor = 0;
+
+            BagStateIterator() {
+                Integer s = null;
+                try {
+                    s = (Integer) stateInfoKvState.get(getComposedKey());
+                } catch (IOException e) {
+                    // Include the exception so the failure is diagnosable, not just the key.
+                    LOG.error("Failed to get elemIndex for key={}", getComposedKey(), e);
+                }
+                this.size = s != null ? ++s : 0;
+            }
+
+            @Override
+            public boolean hasNext() {
+                return cursor < size;
+            }
+
+            @Override
+            public T next() {
+                if (cursor >= size) {
+                    throw new NoSuchElementException();
+                }
+
+                T value = null;
+                try {
+                    value = kvState.get(getComposedKey(cursor));
+                } catch (IOException e) {
+                    LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor), e);
+                }
+                cursor++;
+                return value;
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        }
+
+        private final int size;
+
+        BagStateIterable(int size) {
+            this.size = size;
+        }
+
+        @Override
+        public Iterator<T> iterator() {
+            return new BagStateIterator();
+        }
+
+        @Override
+        public String toString() {
+            return String.format("BagStateIterable: composedKey=%s", getComposedKey());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
new file mode 100644
index 0000000..b0fe29b
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation.runtime.state;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import javax.annotation.Nullable;
+
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.ReadableState;
+import org.apache.beam.sdk.transforms.Combine;
+
+/**
+ * JStorm implementation of {@link CombiningState}.
+ */
+public class JStormCombiningState<InputT, AccumT, OutputT>
+        implements CombiningState<InputT, AccumT, OutputT> {
+
+    // Backing storage: accumulators are appended to a bag and merged lazily on read.
+    @Nullable
+    private final BagState<AccumT> accumBagState;
+    private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
+    JStormCombiningState(
+            BagState<AccumT> accumBagState,
+            Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+        this.accumBagState = checkNotNull(accumBagState, "accumBagState");
+        this.combineFn = checkNotNull(combineFn, "combineFn");
+    }
+
+    @Override
+    public AccumT getAccum() {
+        // TODO: replacing the accumBagState with the merged accum.
+        return combineFn.mergeAccumulators(accumBagState.read());
+    }
+
+    @Override
+    public void addAccum(AccumT accumT) {
+        accumBagState.add(accumT);
+    }
+
+    @Override
+    public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
+        return combineFn.mergeAccumulators(iterable);
+    }
+
+    @Override
+    public void add(InputT input) {
+        // Each input becomes a single-element accumulator; merging is deferred to read().
+        accumBagState.add(
+                combineFn.addInput(combineFn.createAccumulator(), input));
+    }
+
+    @Override
+    public ReadableState<Boolean> isEmpty() {
+        return accumBagState.isEmpty();
+    }
+
+    @Override
+    public OutputT read() {
+        // Merge all stored accumulators, then extract the combined output value.
+        return combineFn.extractOutput(
+            combineFn.mergeAccumulators(accumBagState.read()));
+    }
+
+    @Override
+    public CombiningState<InputT, AccumT, OutputT> readLater() {
+        // TODO: support prefetch.
+        return this;
+    }
+
+    @Override
+    public void clear() {
+        accumBagState.clear();
+    }
+}


[08/53] [abbrv] beam git commit: jstorm-runner: rename the package to org.apache.beam.runners.jstorm.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslationContext.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslationContext.java
deleted file mode 100644
index c3e9805..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslationContext.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation;
-
-import avro.shaded.com.google.common.collect.Lists;
-import com.alibaba.jstorm.beam.translation.translator.Stream;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import com.google.common.base.Strings;
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import org.apache.beam.sdk.runners.AppliedPTransform;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.PValueBase;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.translation.runtime.AdaptorBasicSpout;
-import com.alibaba.jstorm.beam.translation.runtime.Executor;
-import com.alibaba.jstorm.beam.translation.runtime.ExecutorsBolt;
-import com.alibaba.jstorm.beam.translation.util.CommonInstance;
-
-import java.util.*;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-
-/**
- * Maintains the state necessary during Pipeline translation to build a Storm topology.
- */
-public class TranslationContext {
-    private static final Logger LOG = LoggerFactory.getLogger(TranslationContext.class);
-
-    private final UserGraphContext userGraphContext;
-    private final ExecutionGraphContext executionGraphContext;
-
-    public TranslationContext(StormPipelineOptions options) {
-        this.userGraphContext = new UserGraphContext(options);
-        this.executionGraphContext = new ExecutionGraphContext();
-    }
-
-    public ExecutionGraphContext getExecutionGraphContext() {
-        return executionGraphContext;
-    }
-
-    public UserGraphContext getUserGraphContext() {
-        return userGraphContext;
-    }
-
-    private void addStormStreamDef(TaggedPValue input, String destComponentName, Stream.Grouping grouping) {
-        Stream.Producer producer = executionGraphContext.getProducer(input.getValue());
-        if (!producer.getComponentId().equals(destComponentName)) {
-            Stream.Consumer consumer = Stream.Consumer.of(destComponentName, grouping);
-            executionGraphContext.registerStreamConsumer(consumer, producer);
-
-            ExecutorsBolt executorsBolt = executionGraphContext.getBolt(producer.getComponentId());
-            if (executorsBolt != null) {
-                executorsBolt.addExternalOutputTag(input.getTag());
-            }
-        }
-    }
-
-    private String getUpstreamExecutorsBolt() {
-        for (PValue value : userGraphContext.getInputs().values()) {
-            String componentId = executionGraphContext.getProducerComponentId(value);
-            if (componentId != null && executionGraphContext.getBolt(componentId) != null) {
-                return componentId;
-            }
-        }
-        // When upstream component is spout, "null" will be return.
-        return null;
-    }
-
-    /**
-     * check if the current transform is applied to source collection.
-     * @return
-     */
-    private boolean connectedToSource() {
-        for (PValue value : userGraphContext.getInputs().values()) {
-            if (executionGraphContext.producedBySpout(value)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    /**
-     * @param upstreamExecutorsBolt
-     * @return true if there is multiple input streams, or upstream executor output the same stream
-     *          to different executors
-     */
-    private boolean isMultipleInputOrOutput(ExecutorsBolt upstreamExecutorsBolt, Map<TupleTag<?>, PValue> inputs) {
-        if (inputs.size() > 1) {
-            return true;
-        } else {
-            final Sets.SetView<TupleTag> intersection = Sets.intersection(upstreamExecutorsBolt.getExecutors().keySet(), inputs.keySet());
-            if (!intersection.isEmpty()) {
-                // there is already a different executor consume the same input
-                return true;
-            } else {
-                return false;
-            }
-        }
-    }
-
-    public void addTransformExecutor(Executor executor) {
-        addTransformExecutor(executor, Collections.EMPTY_LIST);
-    }
-
-    public void addTransformExecutor(Executor executor, List<PValue> sideInputs) {
-        addTransformExecutor(executor, userGraphContext.getInputs(), userGraphContext.getOutputs(), sideInputs);
-    }
-
-    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs) {
-        addTransformExecutor(executor, inputs, outputs, Collections.EMPTY_LIST);
-    }
-
-    public void addTransformExecutor(Executor executor, Map<TupleTag<?>, PValue> inputs, Map<TupleTag<?>, PValue> outputs, List<PValue> sideInputs) {
-        String name = null;
-
-        ExecutorsBolt bolt = null;
-
-        boolean isGBK = false;
-        /**
-         * Check if the transform executor needs to be chained into an existing ExecutorsBolt.
-         * For following cases, a new bolt is created for the specified executor, otherwise the executor
-         * will be added into the bolt contains corresponding upstream executor.
-         * a) it is a GroupByKey executor
-         * b) it is connected to source directly
-         * c) None existing upstream bolt was found
-         * d) For the purpose of performance to reduce the side effects between multiple streams which
-         *    is output to same executor, a new bolt will be created.
-         */
-        if (RunnerUtils.isGroupByKeyExecutor(executor)) {
-            bolt = new ExecutorsBolt();
-            name = executionGraphContext.registerBolt(bolt);
-            isGBK = true;
-        } else if (connectedToSource()) {
-            bolt = new ExecutorsBolt();
-            name = executionGraphContext.registerBolt(bolt);
-        } else {
-            name = getUpstreamExecutorsBolt();
-            if (name == null) {
-                bolt = new ExecutorsBolt();
-                name = executionGraphContext.registerBolt(bolt);
-            } else {
-                bolt = executionGraphContext.getBolt(name);
-                if (isMultipleInputOrOutput(bolt, inputs)) {
-                    bolt = new ExecutorsBolt();
-                    name = executionGraphContext.registerBolt(bolt);
-                }
-            }
-        }
-
-        // update the output tags of current transform into ExecutorsBolt
-        for (Map.Entry<TupleTag<?>, PValue> entry : outputs.entrySet()) {
-            TupleTag tag = entry.getKey();
-            PValue value = entry.getValue();
-
-            // use tag of PValueBase
-            if (value instanceof PValueBase) {
-                tag = ((PValueBase) value).expand().keySet().iterator().next();
-            }
-            executionGraphContext.registerStreamProducer(
-                    TaggedPValue.of(tag, value),
-                    Stream.Producer.of(name, tag.getId(), value.getName()));
-            //bolt.addOutputTags(tag);
-        }
-
-        // add the transform executor into the chain of ExecutorsBolt
-        for (Map.Entry<TupleTag<?>, PValue> entry : inputs.entrySet()) {
-            TupleTag tag = entry.getKey();
-            PValue value = entry.getValue();
-            bolt.addExecutor(tag, executor);
-
-            // filter all connections inside bolt
-            //if (!bolt.getOutputTags().contains(tag)) {
-                Stream.Grouping grouping;
-                if (isGBK) {
-                    grouping = Stream.Grouping.byFields(Arrays.asList(CommonInstance.KEY));
-                } else {
-                    grouping = Stream.Grouping.of(Stream.Grouping.Type.LOCAL_OR_SHUFFLE);
-                }
-                addStormStreamDef(TaggedPValue.of(tag, value), name, grouping);
-            //}
-        }
-
-        for (PValue sideInput : sideInputs) {
-            TupleTag tag = userGraphContext.findTupleTag(sideInput);
-            bolt.addExecutor(tag, executor);
-            checkState(!bolt.getOutputTags().contains(tag));
-            addStormStreamDef(TaggedPValue.of(tag, sideInput), name, Stream.Grouping.of(Stream.Grouping.Type.ALL));
-        }
-
-        bolt.registerExecutor(executor);
-
-        // set parallelismNumber
-        String pTransformfullName = userGraphContext.currentTransform.getFullName();
-        String compositeName = pTransformfullName.split("/")[0];
-        Map parallelismNumMap = userGraphContext.getOptions().getParallelismNumMap();
-        if (parallelismNumMap.containsKey(compositeName)) {
-            int configNum = (Integer) parallelismNumMap.get(compositeName);
-            int currNum = bolt.getParallelismNum();
-            bolt.setParallelismNum(Math.max(configNum, currNum));
-        }
-    }
-
-    // TODO: add getSideInputs() and getSideOutputs().
-    public static class UserGraphContext {
-        private final StormPipelineOptions options;
-        private final Map<PValue, TupleTag> pValueToTupleTag;
-        private AppliedPTransform<?, ?, ?> currentTransform = null;
-
-        private boolean isWindowed = false;
-
-        public UserGraphContext(StormPipelineOptions options) {
-            this.options = checkNotNull(options, "options");
-            this.pValueToTupleTag = Maps.newHashMap();
-        }
-
-        public StormPipelineOptions getOptions() {
-            return this.options;
-        }
-
-        public void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
-            this.currentTransform = transform;
-        }
-
-        public String getStepName() {
-            return currentTransform.getFullName();
-        }
-
-        public <T extends PValue> T getInput() {
-            return (T) currentTransform.getInputs().values().iterator().next();
-        }
-
-        public Map<TupleTag<?>, PValue> getInputs() {
-            return currentTransform.getInputs();
-        }
-
-        public TupleTag<?> getInputTag() {
-            return currentTransform.getInputs().keySet().iterator().next();
-        }
-
-        public List<TupleTag<?>> getInputTags() {
-            return Lists.newArrayList(currentTransform.getInputs().keySet());
-        }
-
-        public <T extends PValue> T getOutput() {
-            return (T) currentTransform.getOutputs().values().iterator().next();
-        }
-
-        public Map<TupleTag<?>, PValue> getOutputs() {
-            return currentTransform.getOutputs();
-        }
-
-        public TupleTag<?> getOutputTag() {
-            return currentTransform.getOutputs().keySet().iterator().next();
-        }
-
-        public List<TupleTag<?>> getOutputTags() {
-            return Lists.newArrayList(currentTransform.getOutputs().keySet());
-        }
-
-        public void recordOutputTaggedPValue() {
-            for (Map.Entry<TupleTag<?>, PValue> entry : getOutputs().entrySet()) {
-                pValueToTupleTag.put(entry.getValue(), entry.getKey());
-            }
-        }
-
-        public <T> TupleTag<T> findTupleTag(PValue pValue) {
-            return pValueToTupleTag.get(checkNotNull(pValue, "pValue"));
-        }
-
-        public void setWindowed() {
-            this.isWindowed = true;
-        }
-
-        public boolean isWindowed() {
-            return this.isWindowed;
-        }
-
-        @Override
-        public String toString() {
-            return Joiner.on('\n').join(FluentIterable.from(pValueToTupleTag.entrySet())
-                    .transform(new Function<Map.Entry<PValue,TupleTag>, String>() {
-                        @Override
-                        public String apply(Map.Entry<PValue, TupleTag> entry) {
-                            return String.format("%s == %s", entry.getValue().getId(), entry.getKey().getName());
-                        }}));
-        }
-    }
-
-    public static class ExecutionGraphContext {
-
-        private final Map<String, AdaptorBasicSpout> spoutMap = new HashMap<>();
-        private final Map<String, ExecutorsBolt> boltMap = new HashMap<>();
-
-        // One-to-one mapping between Stream.Producer and TaggedPValue (or PValue).
-        private final Map<PValue, Stream.Producer> pValueToProducer = new HashMap<>();
-        private final Map<Stream.Producer, TaggedPValue> producerToTaggedPValue = new HashMap<>();
-
-        private final List<Stream> streams = new ArrayList<>();
-
-        private int id = 1;
-
-        public void registerSpout(AdaptorBasicSpout spout, TaggedPValue output) {
-            checkNotNull(spout, "spout");
-            checkNotNull(output, "output");
-            String name = "spout" + genId();
-            this.spoutMap.put(name, spout);
-            registerStreamProducer(
-                    output,
-                    Stream.Producer.of(name, output.getTag().getId(), output.getValue().getName()));
-        }
-
-        public AdaptorBasicSpout getSpout(String id) {
-            if (Strings.isNullOrEmpty(id)) {
-                return null;
-            }
-            return this.spoutMap.get(id);
-        }
-
-        public Map<String, AdaptorBasicSpout> getSpouts() {
-            return this.spoutMap;
-        }
-
-        public String registerBolt(ExecutorsBolt bolt) {
-            checkNotNull(bolt, "bolt");
-            String name = "bolt" + genId();
-            this.boltMap.put(name, bolt);
-            return name;
-        }
-
-        public ExecutorsBolt getBolt(String id) {
-            if (Strings.isNullOrEmpty(id)) {
-                return null;
-            }
-            return this.boltMap.get(id);
-        }
-
-        public void registerStreamProducer(TaggedPValue taggedPValue, Stream.Producer producer) {
-            checkNotNull(taggedPValue, "taggedPValue");
-            checkNotNull(producer, "producer");
-            pValueToProducer.put(taggedPValue.getValue(), producer);
-            producerToTaggedPValue.put(producer, taggedPValue);
-        }
-
-        public Stream.Producer getProducer(PValue pValue) {
-            return pValueToProducer.get(checkNotNull(pValue, "pValue"));
-        }
-
-        public String getProducerComponentId(PValue pValue) {
-            Stream.Producer producer = getProducer(pValue);
-            return producer == null ? null : producer.getComponentId();
-        }
-
-        public boolean producedBySpout(PValue pValue) {
-            String componentId = getProducerComponentId(pValue);
-            return getSpout(componentId) != null;
-        }
-
-        public void registerStreamConsumer(Stream.Consumer consumer, Stream.Producer producer) {
-            streams.add(Stream.of(
-                    checkNotNull(producer, "producer"),
-                    checkNotNull(consumer, "consumer")));
-        }
-
-        public Map<PValue, Stream.Producer> getPValueToProducers() {
-            return pValueToProducer;
-        }
-
-        public Iterable<Stream> getStreams() {
-            return streams;
-        }
-
-        @Override
-        public String toString() {
-            List<String> ret = new ArrayList<>();
-            ret.add("SPOUT");
-            for (Map.Entry<String, AdaptorBasicSpout> entry : spoutMap.entrySet()) {
-                ret.add(entry.getKey() + ": " + entry.getValue().toString());
-            }
-            ret.add("BOLT");
-            for (Map.Entry<String, ExecutorsBolt> entry : boltMap.entrySet()) {
-                ret.add(entry.getKey() + ": " + entry.getValue().toString());
-            }
-            ret.add("STREAM");
-            for (Stream stream : streams) {
-                ret.add(String.format(
-                        "%s@@%s ---> %s@@%s",
-                        stream.getProducer().getStreamId(),
-                        stream.getProducer().getComponentId(),
-                        stream.getConsumer().getGrouping(),
-                        stream.getConsumer().getComponentId()));
-            }
-            return Joiner.on("\n").join(ret);
-        }
-
-        private synchronized int genId() {
-            return id++;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslatorRegistry.java
deleted file mode 100644
index 5e92eea..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/TranslatorRegistry.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation;
-
-import com.alibaba.jstorm.beam.translation.translator.*;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Lookup table mapping PTransform types to associated TransformTranslator implementations.
- */
-public class TranslatorRegistry {
-    private static final Logger LOG = LoggerFactory.getLogger(TranslatorRegistry.class);
-
-    private static final Map<Class<? extends PTransform>, TransformTranslator> TRANSLATORS = new HashMap<>();
-
-    static {
-        TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
-        TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
-        // TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator());
-        // TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
-
-        TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
-        TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
-
-        //TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator<>());
-        TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
-
-        TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());
-
-        TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator());
-
-        TRANSLATORS.put(ViewTranslator.CreateJStormPCollectionView.class, new ViewTranslator());
-
-        /**
-         * Currently, empty translation is required for combine and reshuffle. Because, the transforms will be 
-         * mapped to GroupByKey and Pardo finally. So we only need to translator the finally transforms.
-         * If any improvement is required, the composite transforms will be translated in the future.
-         */
-        // TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator());
-        // TRANSLATORS.put(Globally.class, new CombineGloballyTranslator());
-        // TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslator());
-    }
-
-    public static TransformTranslator<?> getTranslator(PTransform<?, ?> transform) {
-        TransformTranslator<?> translator = TRANSLATORS.get(transform.getClass());
-        if (translator == null) {
-            LOG.warn("Unsupported operator={}", transform.getClass().getName());
-        }
-        return translator;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AbstractComponent.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AbstractComponent.java
deleted file mode 100644
index 876546d..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AbstractComponent.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import com.alibaba.jstorm.beam.translation.util.CommonInstance;
-
-import backtype.storm.topology.IComponent;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-
-/*
- * Enable user to add output stream definitions by API, rather than hard-code.
- */
-public abstract class AbstractComponent implements IComponent {
-    private Map<String, Fields> streamToFields = new HashMap<>();
-    private Map<String, Boolean> keyStreams = new HashMap<>();
-    private int parallelismNum = 0;
-
-    public void addOutputField(String streamId) {
-        addOutputField(streamId, new Fields(CommonInstance.VALUE));
-    }
-
-    public void addOutputField(String streamId, Fields fields) {
-        streamToFields.put(streamId, fields);
-        keyStreams.put(streamId, false);
-    }
-
-    public void addKVOutputField(String streamId) {
-        streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
-        keyStreams.put(streamId, true);
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
-            declarer.declareStream(entry.getKey(), entry.getValue());
-        }
-    }
-
-    public boolean keyedEmit(String streamId) {
-        Boolean isKeyedStream = keyStreams.get(streamId);
-        return isKeyedStream == null ? false : isKeyedStream;
-    }
-
-    public int getParallelismNum() {
-        return parallelismNum;
-    }
-
-    public void setParallelismNum(int num) {
-        parallelismNum = num;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicBolt.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicBolt.java
deleted file mode 100644
index d1308af..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicBolt.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import backtype.storm.topology.IRichBatchBolt;
-
-public abstract class AdaptorBasicBolt extends AbstractComponent implements IRichBatchBolt {
-    
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicSpout.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicSpout.java
deleted file mode 100644
index 2f77bfb..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/AdaptorBasicSpout.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import backtype.storm.topology.IRichSpout;
-
-public abstract class AdaptorBasicSpout extends AbstractComponent implements IRichSpout {
-    
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnExecutor.java
deleted file mode 100644
index 9d88c4d..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnExecutor.java
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import java.io.Serializable;
-import java.util.*;
-
-import avro.shaded.com.google.common.collect.Iterables;
-import com.alibaba.jstorm.beam.translation.runtime.state.JStormStateInternals;
-import com.alibaba.jstorm.beam.translation.runtime.timer.JStormTimerInternals;
-
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.metric.MetricClient;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.runners.core.DoFnRunners;
-import org.apache.beam.runners.core.DoFnRunners.OutputManager;
-import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
-import org.apache.beam.runners.core.SideInputHandler;
-import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateTag;
-import org.apache.beam.runners.core.StateTags;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.runners.core.NullSideInputReader;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.translation.util.DefaultStepContext;
-import com.alibaba.jstorm.beam.util.SerializedPipelineOptions;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-public class DoFnExecutor<InputT, OutputT> implements Executor {
-    private static final long serialVersionUID = 5297603063991078668L;
-
-    private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
-
-    public class DoFnExecutorOutputManager implements OutputManager, Serializable {
-        private static final long serialVersionUID = -661113364735206170L;
-
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            executorsBolt.processExecutorElem(tag, output);
-        }
-    }
-
-    protected transient DoFnRunner<InputT, OutputT> runner = null;
-    protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
-
-    protected final String stepName;
-
-    protected int internalDoFnExecutorId;
-
-    protected final String description;
-
-    protected final TupleTag<OutputT> mainTupleTag;
-    protected final List<TupleTag<?>> sideOutputTags;
-
-    protected SerializedPipelineOptions serializedOptions;
-    protected transient StormPipelineOptions pipelineOptions;
-
-    protected DoFn<InputT, OutputT> doFn;
-    protected final Coder<WindowedValue<InputT>> inputCoder;
-    protected DoFnInvoker<InputT, OutputT> doFnInvoker;
-    protected OutputManager outputManager;
-    protected WindowingStrategy<?, ?> windowingStrategy;
-    protected final TupleTag<InputT> mainInputTag;
-    protected Collection<PCollectionView<?>> sideInputs;
-    protected SideInputHandler sideInputHandler;
-    protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
-
-    // Initialize during runtime
-    protected ExecutorContext executorContext;
-    protected ExecutorsBolt executorsBolt;
-    protected TimerInternals timerInternals;
-    protected transient StateInternals pushbackStateInternals;
-    protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
-    protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
-    protected transient IKvStoreManager kvStoreManager;
-    protected DefaultStepContext stepContext;
-    protected transient MetricClient metricClient;
-
-    public DoFnExecutor(
-            String stepName,
-            String description,
-            StormPipelineOptions pipelineOptions,
-            DoFn<InputT, OutputT> doFn,
-            Coder<WindowedValue<InputT>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<InputT> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs,
-            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-            TupleTag<OutputT> mainTupleTag,
-            List<TupleTag<?>> sideOutputTags) {
-        this.stepName = checkNotNull(stepName, "stepName");
-        this.description = checkNotNull(description, "description");
-        this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
-        this.doFn = doFn;
-        this.inputCoder = inputCoder;
-        this.outputManager = new DoFnExecutorOutputManager();
-        this.windowingStrategy = windowingStrategy;
-        this.mainInputTag = mainInputTag;
-        this.sideInputs = sideInputs;
-        this.mainTupleTag = mainTupleTag;
-        this.sideOutputTags = sideOutputTags;
-        this.sideInputTagToView = sideInputTagToView;
-    }
-
-    protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
-        return new DoFnRunnerWithMetrics<>(
-            stepName,
-            DoFnRunners.simpleRunner(
-                this.pipelineOptions,
-                this.doFn,
-                this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
-                this.outputManager,
-                this.mainTupleTag,
-                this.sideOutputTags,
-                this.stepContext,
-                this.windowingStrategy),
-            MetricsReporter.create(metricClient));
-    }
-
-    protected void initService(ExecutorContext context) {
-        // TODO: what should be set for key in here?
-        timerInternals = new JStormTimerInternals(null /* key */, this, context.getExecutorsBolt().timerService());
-        kvStoreManager = context.getKvStoreManager();
-        stepContext = new DefaultStepContext(timerInternals,
-                new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        metricClient = new MetricClient(executorContext.getTopologyContext());
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.executorContext = context;
-        this.executorsBolt = context.getExecutorsBolt();
-        this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(StormPipelineOptions.class);
-
-        initService(context);
-
-        // Side inputs setup
-        if (sideInputs != null && sideInputs.isEmpty() == false) {
-            pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
-            watermarkHoldTag =
-                    StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
-            pushbackStateInternals = new JStormStateInternals(null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-            sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
-            runner = getDoFnRunner();
-            pushbackRunner = SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
-        } else {
-            runner = getDoFnRunner();
-        }
-
-        // Process user's setup
-        doFnInvoker = DoFnInvokers.invokerFor(doFn);
-        doFnInvoker.invokeSetup();
-    }
-
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
-                tag, mainInputTag, sideInputs, elem.getValue()));
-        if (mainInputTag.equals(tag)) {
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
-    }
-
-    protected <T> void processMainInput(WindowedValue<T> elem) {
-       if (sideInputs.isEmpty()) {
-           runner.processElement((WindowedValue<InputT>) elem);
-       } else {
-           Iterable<WindowedValue<InputT>> justPushedBack =
-               pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
-           BagState<WindowedValue<InputT>> pushedBack =
-                   pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-
-           Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-           for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
-               if (pushedBackValue.getTimestamp().isBefore(min)) {
-                   min = pushedBackValue.getTimestamp();
-               }
-               min = earlier(min, pushedBackValue.getTimestamp());
-               pushedBack.add(pushedBackValue);
-           }
-           pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
-       }
-    }
-
-    protected void processSideInput(TupleTag tag, WindowedValue elem) {
-        LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
-
-        PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
-        sideInputHandler.addSideInputValue(sideInputView, elem);
-
-        BagState<WindowedValue<InputT>> pushedBack =
-                pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-
-        List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
-
-        Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
-        if (pushedBackInputs != null) {
-            for (WindowedValue<InputT> input : pushedBackInputs) {
-
-                Iterable<WindowedValue<InputT>> justPushedBack =
-                        pushbackRunner.processElementInReadyWindows(input);
-                Iterables.addAll(newPushedBack, justPushedBack);
-            }
-        }
-        pushedBack.clear();
-
-        Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-        for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
-            min = earlier(min, pushedBackValue.getTimestamp());
-            pushedBack.add(pushedBackValue);
-        }
-
-        WatermarkHoldState watermarkHold =
-                pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-        // TODO: clear-then-add is not thread-safe.
-        watermarkHold.clear();
-        watermarkHold.add(min);
-    }
-
-    /**
-     * Process all pushed back elements when receiving watermark with max timestamp
-     */
-    public void processAllPushBackElements() {
-        if (sideInputs != null && sideInputs.isEmpty() == false) {
-            BagState<WindowedValue<InputT>> pushedBackElements =
-                    pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-            if (pushedBackElements != null) {
-                for (WindowedValue<InputT> elem : pushedBackElements.read()) {
-                    LOG.info("Process pushback elem={}", elem);
-                    runner.processElement(elem);
-                }
-                pushedBackElements.clear();
-            }
-
-            WatermarkHoldState watermarkHold =
-                    pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-            watermarkHold.clear();
-            watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
-        }
-    }
-
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        StateNamespace namespace = timerData.getNamespace();
-        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
-        BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
-        if (pushbackRunner != null) {
-            pushbackRunner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
-        } else {
-            runner.onTimer(timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
-        }
-    }
-
-    @Override
-    public void cleanup() {
-        doFnInvoker.invokeTeardown();
-    }
-
-    @Override
-    public String toString() {
-        return description;
-    }
-
-    private Instant earlier(Instant left, Instant right) {
-        return left.isBefore(right) ? left : right;
-    }
-
-    public void startBundle() {
-        if (pushbackRunner != null) {
-            pushbackRunner.startBundle();
-        } else {
-            runner.startBundle();
-        }
-    }
-
-    public void finishBundle() {
-        if (pushbackRunner != null) {
-            pushbackRunner.finishBundle();
-        } else {
-            runner.finishBundle();
-        }
-    }
-
-    public void setInternalDoFnExecutorId(int id) {
-        this.internalDoFnExecutorId = id;
-    }
-
-    public int getInternalDoFnExecutorId() {
-        return internalDoFnExecutorId;
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnRunnerWithMetrics.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnRunnerWithMetrics.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnRunnerWithMetrics.java
deleted file mode 100644
index 105dffb..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/DoFnRunnerWithMetrics.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import java.io.Closeable;
-import java.io.IOException;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.sdk.metrics.MetricsContainer;
-import org.apache.beam.sdk.metrics.MetricsEnvironment;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.joda.time.Instant;
-
-/**
- * DoFnRunner decorator which registers {@link MetricsContainer}.
- */
-public class DoFnRunnerWithMetrics<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
-
-  private final String stepName;
-  private final DoFnRunner<InputT, OutputT> delegate;
-  private final MetricsReporter metricsReporter;
-
-  DoFnRunnerWithMetrics(
-      String stepName,
-      DoFnRunner<InputT, OutputT> delegate,
-      MetricsReporter metricsReporter) {
-    this.stepName = checkNotNull(stepName, "stepName");
-    this.delegate = checkNotNull(delegate, "delegate");
-    this.metricsReporter = checkNotNull(metricsReporter, "metricsReporter");
-  }
-
-  @Override
-  public void startBundle() {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.startBundle();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void processElement(WindowedValue<InputT> elem) {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.processElement(elem);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void onTimer(String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.onTimer(timerId, window, timestamp, timeDomain);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void finishBundle() {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.finishBundle();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    metricsReporter.updateMetrics();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/Executor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/Executor.java
deleted file mode 100644
index 30348b2..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/Executor.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import java.io.Serializable;
-import java.util.Map;
-
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import com.alibaba.jstorm.utils.Pair;
-
-public interface Executor extends Serializable {
-    /**
-     * Initialization during runtime
-     */
-    void init(ExecutorContext context);
-
-    <T> void  process(TupleTag<T> tag, WindowedValue<T> elem);
-
-    void cleanup();
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorContext.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorContext.java
deleted file mode 100644
index 7f9aa77..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorContext.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import backtype.storm.task.TopologyContext;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.google.auto.value.AutoValue;
-
-@AutoValue
-public abstract class ExecutorContext {
-    public static ExecutorContext of(TopologyContext topologyContext, ExecutorsBolt bolt, IKvStoreManager kvStoreManager) {
-        return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
-    }
-
-    public abstract TopologyContext getTopologyContext();
-
-    public abstract ExecutorsBolt getExecutorsBolt();
-
-    public abstract IKvStoreManager getKvStoreManager();
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorsBolt.java
deleted file mode 100644
index ebd9456..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/ExecutorsBolt.java
+++ /dev/null
@@ -1,332 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import java.io.IOException;
-import java.util.*;
-
-import avro.shaded.com.google.common.base.Joiner;
-import avro.shaded.com.google.common.collect.Sets;
-import backtype.storm.tuple.ITupleExt;
-import backtype.storm.tuple.TupleImplExt;
-import com.alibaba.jstorm.beam.translation.util.CommonInstance;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.cluster.Common;
-import com.alibaba.jstorm.utils.KryoSerializer;
-import com.alibaba.jstorm.window.Watermark;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-public class ExecutorsBolt extends AdaptorBasicBolt {
-    private static final long serialVersionUID = -7751043327801735211L;
-
-    private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
-
-    protected ExecutorContext executorContext;
-
-    protected TimerService timerService;
-
-    // map from input tag to executor inside bolt
-    protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
-    // set of all output tags that will be emit outside bolt
-    protected final Set<TupleTag> outputTags = Sets.newHashSet();
-    protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
-    protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
-    protected int internalDoFnExecutorId = 1;
-    protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
-
-    protected OutputCollector collector;
-
-    protected boolean isStatefulBolt = false;
-
-    protected KryoSerializer<WindowedValue> serializer;
-
-    public ExecutorsBolt() {
-
-    }
-
-    public void setStatefulBolt(boolean isStateful) {
-        isStatefulBolt = isStateful;
-    }
-
-    public void addExecutor(TupleTag inputTag, Executor executor) {
-        inputTagToExecutor.put(
-                checkNotNull(inputTag, "inputTag"),
-                checkNotNull(executor, "executor"));
-    }
-
-    public Map<TupleTag, Executor> getExecutors() {
-        return inputTagToExecutor;
-    }
-
-    public void registerExecutor(Executor executor) {
-        if (executor instanceof DoFnExecutor) {
-            DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
-            idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
-            doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
-            internalDoFnExecutorId++;
-        }
-    }
-
-    public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
-        return idToDoFnExecutor;
-    }
-
-    public void addOutputTags(TupleTag tag) {
-        outputTags.add(tag);
-    }
-
-    public void addExternalOutputTag(TupleTag<?> tag) {
-        externalOutputTags.add(tag);
-    }
-
-    public Set<TupleTag> getOutputTags() {
-        return outputTags;
-    }
-
-    public ExecutorContext getExecutorContext() {
-        return executorContext;
-    }
-
-    @Override
-    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-        LOG.info("Start to prepare for task-{}", context.getThisTaskId());
-        try {
-            this.collector = collector;
-
-            // init kv store manager
-            String storeName = String.format("task-%d", context.getThisTaskId());
-            String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-            IKvStoreManager kvStoreManager = isStatefulBolt ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, stateStorePath, isStatefulBolt) :
-                    KvStoreManagerFactory.getKvStoreManager(stormConf, storeName, stateStorePath, isStatefulBolt);
-            this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
-
-            // init time service
-            timerService = initTimerService();
-
-            // init all internal executors
-            for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-                executor.init(executorContext);
-                if (executor instanceof DoFnExecutor) {
-                    doFnExecutors.add((DoFnExecutor) executor);
-                }
-            }
-
-            this.serializer = new KryoSerializer<WindowedValue>(stormConf);
-
-            LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
-            LOG.info("inputTagToExecutor={}", inputTagToExecutor);
-            LOG.info("outputTags={}", outputTags);
-            LOG.info("externalOutputTags={}", externalOutputTags);
-            LOG.info("doFnExecutors={}", doFnExecutors);
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to prepare executors bolt", e);
-        }
-    }
-
-    public TimerService initTimerService() {
-        TopologyContext context = executorContext.getTopologyContext();
-        List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
-                .transformAndConcat(
-                        new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
-                            @Override
-                            public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
-                                if (Common.isSystemComponent(value.getKey())) {
-                                    return Collections.EMPTY_LIST;
-                                } else {
-                                    return value.getValue();
-                                }
-                            }
-                        })
-                .toList();
-        TimerService ret = new TimerServiceImpl(executorContext);
-        ret.init(tasks);
-        return ret;
-    }
-
-    @Override
-    public void execute(Tuple input) {
-        // process a batch
-        String streamId = input.getSourceStreamId();
-        ITupleExt tuple = (ITupleExt) input;
-        Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
-        if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
-            while (valueIterator.hasNext()) {
-                processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
-            }
-        } else {
-            doFnStartBundle();
-            while (valueIterator.hasNext()) {
-                processElement(valueIterator.next(), streamId);
-            }
-            doFnFinishBundle();
-        }
-    }
-
-    private void processWatermark(long watermarkTs, int sourceTask) {
-        long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
-        LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
-                (new Instant(watermarkTs)).toDateTime(), sourceTask, (new Instant(newWaterMark)).toDateTime());
-        if (newWaterMark != 0) {
-            // Some buffer windows are going to be triggered.
-            doFnStartBundle();
-            timerService.fireTimers(newWaterMark);
-
-            // SideInput: If receiving water mark with max timestamp, It means no more data is supposed
-            // to be received from now on. So we are going to process all push back data.
-            if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
-                for (DoFnExecutor doFnExecutor : doFnExecutors) {
-                    doFnExecutor.processAllPushBackElements();
-                }
-            }
-
-            doFnFinishBundle();
-        }
-
-        long currentWaterMark = timerService.currentOutputWatermark();
-        if (!externalOutputTags.isEmpty()) {
-            collector.flush();
-            collector.emit(
-                    CommonInstance.BEAM_WATERMARK_STREAM_ID,
-                    new Values(currentWaterMark));
-            LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
-        }
-    }
-
-    private void processElement(List<Object> values, String streamId) {
-        TupleTag inputTag = new TupleTag(streamId);
-        WindowedValue windowedValue = retrieveWindowedValueFromTupleValue(values);
-        processExecutorElem(inputTag, windowedValue);
-    }
-
-    public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
-        LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
-        if (elem != null) {
-            Executor executor = inputTagToExecutor.get(inputTag);
-            if (executor != null) {
-                executor.process(inputTag, elem);
-            }
-            if (externalOutputTags.contains(inputTag)) {
-                emitOutsideBolt(inputTag, elem);
-            }
-        } else {
-            LOG.info("Received null elem for tag={}", inputTag);
-        }
-    }
-
-    @Override
-    public void cleanup() {
-        for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-            executor.cleanup();
-        }
-        executorContext.getKvStoreManager().close();
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
-    }
-
-    public TimerService timerService() {
-        return timerService;
-    }
-
-    public void setTimerService(TimerService service) {
-        timerService = service;
-    }
-
-    private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
-        WindowedValue wv = null;
-        if (values.size() > 1) {
-            Object key = values.get(0);
-            WindowedValue value = serializer.deserialize((byte[]) values.get(1));
-            wv = value.withValue(KV.of(key, value.getValue()));
-        } else {
-            wv = serializer.deserialize((byte[])values.get(0));
-        }
-        return wv;
-    }
-
-    protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
-        LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
-        if (keyedEmit(outputTag.getId())) {
-            KV kv = (KV) outputValue.getValue();
-            byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
-            // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-            if (kv.getKey() == null) {
-                // If key is null, emit "null" string here. Because, null value will be ignored in JStorm.
-                collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
-            } else {
-                collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
-            }
-        } else {
-            byte[] immutableOutputValue = serializer.serialize(outputValue);
-            collector.emit(outputTag.getId(), new Values(immutableOutputValue));
-        }
-    }
-
-    private void doFnStartBundle() {
-        for (DoFnExecutor doFnExecutor : doFnExecutors) {
-            doFnExecutor.startBundle();
-        }
-    }
-
-    private void doFnFinishBundle() {
-        for (DoFnExecutor doFnExecutor : doFnExecutors) {
-            doFnExecutor.finishBundle();
-        }
-    }
-
-    @Override
-    public String toString() {
-        // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
-        List<String> ret = new ArrayList<>();
-        /*ret.add("inputTags");
-        for (TupleTag inputTag : inputTagToExecutor.keySet()) {
-            ret.add(inputTag.getId());
-        }*/
-        ret.add("internalExecutors");
-        for (Executor executor : inputTagToExecutor.values()) {
-            ret.add(executor.toString());
-        }
-        ret.add("externalOutputTags");
-        for (TupleTag output : externalOutputTags) {
-            ret.add(output.getId());
-        }
-        return Joiner.on('\n').join(ret).concat("\n");
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/FlattenExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/FlattenExecutor.java
deleted file mode 100644
index 7158b2f..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/FlattenExecutor.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-public class FlattenExecutor<InputT> implements Executor {
-
-    private final String description;
-    private TupleTag mainOutputTag;
-    private ExecutorContext context;
-    private ExecutorsBolt executorsBolt;
-
-    public FlattenExecutor(String description, TupleTag mainTupleTag) {
-        this.description = checkNotNull(description, "description");
-        this.mainOutputTag = mainTupleTag;
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.context = context;
-        this.executorsBolt = context.getExecutorsBolt();
-    }
-
-    @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        executorsBolt.processExecutorElem(mainOutputTag, elem);
-    }
-
-    @Override
-    public void cleanup() {}
-
-    @Override
-    public String toString() {
-        return description;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/GroupByWindowExecutor.java
deleted file mode 100644
index 1958c77..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/GroupByWindowExecutor.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import java.io.Serializable;
-import java.util.List;
-
-import com.alibaba.jstorm.beam.translation.runtime.state.JStormStateInternals;
-import com.alibaba.jstorm.beam.translation.runtime.timer.JStormTimerInternals;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.runners.core.DoFnRunners;
-import org.apache.beam.runners.core.ExecutionContext.StepContext;
-import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetNewDoFn;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.KeyedWorkItems;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateInternalsFactory;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.runners.core.SystemReduceFn;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.TimerInternalsFactory;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.runners.core.NullSideInputReader;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.TupleTag;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.translation.TranslationContext;
-import com.alibaba.jstorm.beam.translation.TranslationContext.UserGraphContext;
-import com.alibaba.jstorm.beam.translation.util.DefaultStepContext;
-import com.alibaba.jstorm.beam.util.RunnerUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-public class GroupByWindowExecutor<K, V> extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
-    private static final long serialVersionUID = -7563050475488610553L;
-
-    private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
-
-    private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
-
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            executorsBolt.processExecutorElem(tag, output);
-        }
-    }
-
-    private KvCoder<K, V> inputKvCoder;
-    private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
-
-    public GroupByWindowExecutor(
-            String stepName,
-            String description,
-            TranslationContext context,
-            StormPipelineOptions pipelineOptions,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-        // The doFn will be created when runtime. Just pass "null" here
-        super(stepName, description, pipelineOptions, null, null, windowingStrategy, null, null, null, mainTupleTag, sideOutputTags);
-
-        this.outputManager = new GroupByWindowOutputManager();
-        UserGraphContext userGraphContext = context.getUserGraphContext();
-        PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-        this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
-    }
-
-    private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
-        final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
-            @Override
-            public StateInternals stateInternalsForKey(K key) {
-                return new JStormStateInternals<K>(key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-            }
-        };
-        TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
-            @Override
-            public TimerInternals timerInternalsForKey(K key) {
-                return new JStormTimerInternals<>(key, GroupByWindowExecutor.this, executorContext.getExecutorsBolt().timerService());
-            }
-        };
-
-        reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
-        DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
-              GroupAlsoByWindowViaWindowSetNewDoFn.create(
-                  windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
-                      (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
-        return doFn;
-    }
-
-    @Override
-    protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
-        doFn = getGroupByWindowDoFn();
-
-        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.<KeyedWorkItem<K, V>, KV<K, Iterable<V>>>simpleRunner(
-                this.pipelineOptions,
-                this.doFn,
-                NullSideInputReader.empty(),
-                this.outputManager,
-                this.mainTupleTag,
-                this.sideOutputTags,
-                this.stepContext,
-                this.windowingStrategy);
-
-        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner = DoFnRunners.lateDataDroppingRunner(
-                simpleRunner,
-                this.stepContext,
-                this.windowingStrategy);
-        return new DoFnRunnerWithMetrics<>(
-            stepName, doFnRunner, MetricsReporter.create(metricClient));
-    }
-
-    @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        /**
-         *  For GroupByKey, KV type elem is received. We need to convert the KV elem
-         *  into KeyedWorkItem first, which is the expected type in LateDataDroppingDoFnRunner.
-         */
-        KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
-        runner.processElement(elem.withValue(keyedWorkItem));
-    }
-
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        StateNamespace namespace = timerData.getNamespace();
-        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
-
-        runner.processElement(
-                WindowedValue.valueInGlobalWindow(
-                        KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
-    }
-
-    @Override
-    public String toString() {
-        return super.toString();
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MetricsReporter.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MetricsReporter.java
deleted file mode 100644
index 33095b1..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MetricsReporter.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static org.apache.beam.runners.core.metrics.MetricsContainerStepMap.asAttemptedOnlyMetricResults;
-
-import com.alibaba.jstorm.common.metric.AsmCounter;
-import com.alibaba.jstorm.common.metric.AsmGauge;
-import com.alibaba.jstorm.common.metric.AsmHistogram;
-import com.alibaba.jstorm.common.metric.AsmMeter;
-import com.alibaba.jstorm.common.metric.AsmMetric;
-import com.alibaba.jstorm.metric.MetricClient;
-import com.google.common.collect.Maps;
-import java.util.Map;
-import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
-import org.apache.beam.sdk.metrics.DistributionResult;
-import org.apache.beam.sdk.metrics.GaugeResult;
-import org.apache.beam.sdk.metrics.MetricQueryResults;
-import org.apache.beam.sdk.metrics.MetricResult;
-import org.apache.beam.sdk.metrics.MetricResults;
-import org.apache.beam.sdk.metrics.MetricsContainer;
-import org.apache.beam.sdk.metrics.MetricsFilter;
-
-/**
- * Class that holds a {@link MetricsContainerStepMap}, and reports metrics to JStorm engine.
- */
-public class MetricsReporter {
-
-  private static final String METRIC_KEY_SEPARATOR = "__";
-  private static final String COUNTER_PREFIX = "__counter";
-
-  private final MetricsContainerStepMap metricsContainers = new MetricsContainerStepMap();
-  private final Map<String, Long> reportedCounters = Maps.newHashMap();
-  private final MetricClient metricClient;
-
-  public static MetricsReporter create(MetricClient metricClient) {
-    return new MetricsReporter(metricClient);
-  }
-
-  private MetricsReporter(MetricClient metricClient) {
-    this.metricClient = checkNotNull(metricClient, "metricClient");
-  }
-
-  public MetricsContainer getMetricsContainer(String stepName) {
-    return metricsContainers.getContainer(stepName);
-  }
-
-  public void updateMetrics() {
-    MetricResults metricResults = asAttemptedOnlyMetricResults(metricsContainers);
-    MetricQueryResults metricQueryResults =
-        metricResults.queryMetrics(MetricsFilter.builder().build());
-    updateCounters(metricQueryResults.counters());
-  }
-
-  private void updateCounters(Iterable<MetricResult<Long>> counters) {
-    System.out.print("updateCounters");
-    for (MetricResult<Long> metricResult : counters) {
-      String metricName = getMetricNameString(COUNTER_PREFIX, metricResult);
-      System.out.print("metricName: " + metricName);
-      Long updateValue = metricResult.attempted();
-      Long oldValue = reportedCounters.get(metricName);
-
-      if (oldValue == null || oldValue < updateValue) {
-        AsmCounter counter = metricClient.registerCounter(metricName);
-        Long incValue = (oldValue == null ? updateValue : updateValue - oldValue);
-        counter.update(incValue);
-      }
-    }
-  }
-
-  private String getMetricNameString(String prefix, MetricResult<?> metricResult) {
-    return prefix
-        + METRIC_KEY_SEPARATOR + metricResult.step()
-        + METRIC_KEY_SEPARATOR + metricResult.name().namespace()
-        + METRIC_KEY_SEPARATOR + metricResult.name().name();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiOutputDoFnExecutor.java
deleted file mode 100644
index bd3dfb3..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiOutputDoFnExecutor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
-    private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
-
-    /**
-     * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated tag
-     * is used in downstream consumer. So before output, we need to map this "local" tag to "external"
-     * tag. See PCollectionTuple for details.
-     */
-    public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            if (localTupleTagMap.containsKey(tag)) {
-                executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
-            } else {
-                executorsBolt.processExecutorElem(tag, output);
-            }
-        }
-    }
-
-    protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
-
-    public MultiOutputDoFnExecutor(
-            String stepName,
-            String description,
-            StormPipelineOptions pipelineOptions,
-            DoFn<InputT, OutputT> doFn,
-            Coder<WindowedValue<InputT>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<InputT> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs,
-            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-            TupleTag<OutputT> mainTupleTag,
-            List<TupleTag<?>> sideOutputTags,
-            Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
-            ) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
-                sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-        this.localTupleTagMap = localTupleTagMap;
-        this.outputManager = new MultiOutputDoFnExecutorOutputManager();
-        LOG.info("localTupleTagMap: {}", localTupleTagMap);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa654b3f/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiStatefulDoFnExecutor.java
deleted file mode 100644
index 51aa960..0000000
--- a/runners/jstorm/src/main/java/com/alibaba/jstorm/beam/translation/runtime/MultiStatefulDoFnExecutor.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.alibaba.jstorm.beam.translation.runtime;
-
-import com.alibaba.jstorm.beam.StormPipelineOptions;
-import com.alibaba.jstorm.beam.translation.runtime.state.JStormStateInternals;
-import com.alibaba.jstorm.beam.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-public class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
-
-    public MultiStatefulDoFnExecutor(
-            String stepName, String description,
-            StormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
-            Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
-            Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
-            List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
-    }
-
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        if (mainInputTag.equals(tag)) {
-            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-                    executorContext.getExecutorsBolt().timerService()));
-            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
-    }
-
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        stepContext.setStateInternals(new JStormStateInternals<>(key,
-                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        super.onTimer(key, timerData);
-    }
-}


[46/53] [abbrv] beam git commit: jstorm-runner: 1. Generate execution DAG for runtime 2. Restructure Kryo serializers

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
deleted file mode 100644
index 615ac8b..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.serialization;
-
-import backtype.storm.Config;
-import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
-import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
-import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-/**
- * Specific serializer of {@link Kryo} for Unmodifiable Collection.
- */
-public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
-
-  private static final Field SOURCE_COLLECTION_FIELD;
-  private static final Field SOURCE_MAP_FIELD;
-
-  static {
-    try {
-      SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection")
-          .getDeclaredField("c");
-      SOURCE_COLLECTION_FIELD.setAccessible(true);
-
-
-      SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap")
-          .getDeclaredField("m");
-      SOURCE_MAP_FIELD.setAccessible(true);
-    } catch (final Exception e) {
-      throw new RuntimeException("Could not access source collection"
-          + " field in java.util.Collections$UnmodifiableCollection.", e);
-    }
-  }
-
-  @Override
-  public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
-    final int ordinal = input.readInt(true);
-    final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.values()[ordinal];
-    final Object sourceCollection = kryo.readClassAndObject(input);
-    return unmodifiableCollection.create(sourceCollection);
-  }
-
-  @Override
-  public void write(final Kryo kryo, final Output output, final Object object) {
-    try {
-      final UnmodifiableCollection unmodifiableCollection =
-          UnmodifiableCollection.valueOfType(object.getClass());
-      // the ordinal could be replaced by s.th. else (e.g. a explicitely managed "id")
-      output.writeInt(unmodifiableCollection.ordinal(), true);
-      kryo.writeClassAndObject(output, unmodifiableCollection.sourceCollectionField.get(object));
-    } catch (final RuntimeException e) {
-      // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
-      // handles SerializationException specifically (resizing the buffer)...
-      throw e;
-    } catch (final Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public Object copy(Kryo kryo, Object original) {
-    try {
-      final UnmodifiableCollection unmodifiableCollection =
-          UnmodifiableCollection.valueOfType(original.getClass());
-      Object sourceCollectionCopy =
-          kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
-      return unmodifiableCollection.create(sourceCollectionCopy);
-    } catch (final RuntimeException e) {
-      // Don't eat and wrap RuntimeExceptions
-      throw e;
-    } catch (final Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private enum UnmodifiableCollection {
-    COLLECTION(
-        Collections.unmodifiableCollection(Arrays.asList("")).getClass(),
-        SOURCE_COLLECTION_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableCollection((Collection<?>) sourceCollection);
-      }
-    },
-    RANDOM_ACCESS_LIST(
-        Collections.unmodifiableList(new ArrayList<Void>()).getClass(),
-        SOURCE_COLLECTION_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableList((List<?>) sourceCollection);
-      }
-    },
-    LIST(Collections.unmodifiableList(new LinkedList<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableList((List<?>) sourceCollection);
-      }
-    },
-    SET(Collections.unmodifiableSet(new HashSet<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableSet((Set<?>) sourceCollection);
-      }
-    },
-    SORTED_SET(
-        Collections.unmodifiableSortedSet(new TreeSet<Void>()).getClass(),
-        SOURCE_COLLECTION_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableSortedSet((SortedSet<?>) sourceCollection);
-      }
-    },
-    MAP(Collections.unmodifiableMap(new HashMap<Void, Void>()).getClass(), SOURCE_MAP_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableMap((Map<?, ?>) sourceCollection);
-      }
-
-    },
-    SORTED_MAP(
-        Collections.unmodifiableSortedMap(new TreeMap<Void, Void>()).getClass(),
-        SOURCE_MAP_FIELD) {
-      @Override
-      public Object create(final Object sourceCollection) {
-        return Collections.unmodifiableSortedMap((SortedMap<?, ?>) sourceCollection);
-      }
-    };
-
-    private final Class<?> type;
-    private final Field sourceCollectionField;
-
-    private UnmodifiableCollection(final Class<?> type, final Field sourceCollectionField) {
-      this.type = type;
-      this.sourceCollectionField = sourceCollectionField;
-    }
-
-    /**
-     * @param sourceCollection
-     */
-    public abstract Object create(Object sourceCollection);
-
-    static UnmodifiableCollection valueOfType(final Class<?> type) {
-      for (final UnmodifiableCollection item : values()) {
-        if (item.type.equals(type)) {
-          return item;
-        }
-      }
-      throw new IllegalArgumentException("The type " + type + " is not supported.");
-    }
-
-  }
-
-  /**
-   * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
-   * for the several unmodifiable Collections that can be created via {@link Collections},
-   * including {@link Map}s.
-   *
-   * @see Collections#unmodifiableCollection(Collection)
-   * @see Collections#unmodifiableList(List)
-   * @see Collections#unmodifiableSet(Set)
-   * @see Collections#unmodifiableSortedSet(SortedSet)
-   * @see Collections#unmodifiableMap(Map)
-   * @see Collections#unmodifiableSortedMap(SortedMap)
-   */
-  public static void registerSerializers(Config config) {
-    UnmodifiableCollection.values();
-    for (final UnmodifiableCollection item : UnmodifiableCollection.values()) {
-      config.registerSerialization(item.type, UnmodifiableCollectionsSerializer.class);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
index f64193e..53555c9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/BoundedSourceTranslator.java
@@ -39,6 +39,7 @@ class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounde
     TupleTag<?> outputTag = userGraphContext.getOutputTag();
     PValue outputValue = userGraphContext.getOutput();
     UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        userGraphContext.getStepName(),
         description,
         new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
         userGraphContext.getOptions(), outputTag);

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
index 6baa944..2148f34 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
@@ -277,7 +277,7 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
           pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
       if (pushedBackElements != null) {
         for (WindowedValue<InputT> elem : pushedBackElements.read()) {
-          LOG.info("Process pushback elem={}", elem);
+          LOG.debug("Process pushed back elem: {}", elem);
           runner.processElement(elem);
         }
         pushedBackElements.clear();

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
index 145b224..8812988 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/Executor.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.values.TupleTag;
 /**
  * An executor is a basic executable unit in a JStorm task.
  */
-interface Executor extends Serializable {
+public interface Executor extends Serializable {
   /**
    * Initialization during runtime.
    */

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
index 33393f2..f8e09be 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ExecutorsBolt.java
@@ -36,6 +36,7 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
@@ -63,6 +64,8 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
 
   // map from input tag to executor inside bolt
   protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
+  protected final Map<Executor, Collection<TupleTag>> executorToOutputTags = Maps.newHashMap();
+  protected final Map<Executor, String> executorToPTransformName = Maps.newHashMap();
   // set of all output tags that will be emit outside bolt
   protected final Set<TupleTag> outputTags = Sets.newHashSet();
   protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
@@ -84,16 +87,21 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
     isStatefulBolt = isStateful;
   }
 
-  public void addExecutor(TupleTag inputTag, Executor executor) {
+  public void addExecutor(TupleTag inputTag, Executor executor, String name) {
     inputTagToExecutor.put(
         checkNotNull(inputTag, "inputTag"),
         checkNotNull(executor, "executor"));
+    executorToPTransformName.put(executor, name);
   }
 
   public Map<TupleTag, Executor> getExecutors() {
     return inputTagToExecutor;
   }
 
+  public Map<Executor, String> getExecutorNames() {
+    return executorToPTransformName;
+  }
+
   public void registerExecutor(Executor executor) {
     if (executor instanceof DoFnExecutor) {
       DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
@@ -107,14 +115,31 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
     return idToDoFnExecutor;
   }
 
-  public void addOutputTags(TupleTag tag) {
-    outputTags.add(tag);
+  public void addOutputTags(Executor executor, TupleTag outputTag) {
+    Collection<TupleTag> outTags;
+    if (executorToOutputTags.containsKey(executor)) {
+      outTags = executorToOutputTags.get(executor);
+    } else {
+      outTags = Sets.newHashSet();
+      executorToOutputTags.put(executor, outTags);
+    }
+    outTags.add(outputTag);
+
+    outputTags.add(outputTag);
+  }
+
+  public Map<Executor, Collection<TupleTag>> getExecutorToOutputTags() {
+    return executorToOutputTags;
   }
 
   public void addExternalOutputTag(TupleTag<?> tag) {
     externalOutputTags.add(tag);
   }
 
+  public Collection<TupleTag> getExternalOutputTags() {
+    return externalOutputTags;
+  }
+
   public Set<TupleTag> getOutputTags() {
     return outputTags;
   }
@@ -328,6 +353,10 @@ public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
     for (Executor executor : inputTagToExecutor.values()) {
       ret.add(executor.toString());
     }
+    ret.add("outputTags");
+    for (TupleTag outputTag : outputTags) {
+      ret.add(outputTag.getId());
+    }
     ret.add("externalOutputTags");
     for (TupleTag output : externalOutputTags) {
       ret.add(output.getId());

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
index b96bc56..ebe8bc3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -70,6 +70,7 @@ class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollecti
       PValue output = userGraphContext.getOutput();
 
       UnboundedSourceSpout spout = new UnboundedSourceSpout(
+          userGraphContext.getStepName(),
           description,
           new EmptySource(),
           userGraphContext.getOptions(),

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
index 90ef6d2..292b771 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormStateInternals.java
@@ -28,6 +28,7 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import javax.annotation.Nullable;
+
 import org.apache.beam.runners.core.StateInternals;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.runners.core.StateTag;
@@ -328,8 +329,8 @@ class JStormStateInternals<K> implements StateInternals {
     public void add(T input) {
       try {
         int elemIndex = getElementIndex();
+        stateInfoKvState.put(getComposedKey(), elemIndex + 1);
         kvState.put(getComposedKey(elemIndex), input);
-        stateInfoKvState.put(getComposedKey(), ++elemIndex);
       } catch (IOException e) {
         throw new RuntimeException(e.getCause());
       }
@@ -381,23 +382,11 @@ class JStormStateInternals<K> implements StateInternals {
     }
 
     private ComposedKey getComposedKey() {
-      return ComposedKey.of(key, namespace);
+      return ComposedKey.of(id, key, namespace);
     }
 
     private ComposedKey getComposedKey(int elemIndex) {
-      return ComposedKey.of(key, namespace, elemIndex);
-    }
-
-    @Override
-    public String toString() {
-      int elemIndex = -1;
-      try {
-        elemIndex = getElementIndex();
-      } catch (IOException e) {
-
-      }
-      return String.format("stateId=%s, key=%s, namespace=%s, elementIndex=%d",
-              id, key, namespace, elemIndex);
+      return ComposedKey.of(id, key, namespace, elemIndex);
     }
 
     @Override
@@ -475,11 +464,6 @@ class JStormStateInternals<K> implements StateInternals {
       public Iterator<T> iterator() {
         return new BagStateIterator();
       }
-
-      @Override
-      public String toString() {
-        return String.format("BagStateIterable: composedKey=%s", getComposedKey());
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index 101921f..0991448 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -186,7 +186,7 @@ public class TranslationContext {
       executionGraphContext.registerStreamProducer(
           TaggedPValue.of(tag, value),
           Stream.Producer.of(name, tag.getId(), value.getName()));
-      //bolt.addOutputTags(tag);
+      bolt.addOutputTags(executor, tag);
     }
 
     // add the transform executor into the chain of ExecutorsBolt
@@ -196,7 +196,7 @@ public class TranslationContext {
       if (userGraphContext.findTupleTag(value) != null) {
         tag = userGraphContext.findTupleTag(value);
       }
-      bolt.addExecutor(tag, executor);
+      bolt.addExecutor(tag, executor, userGraphContext.getStepName());
 
       // filter all connections inside bolt
       //if (!bolt.getOutputTags().contains(tag)) {
@@ -212,7 +212,7 @@ public class TranslationContext {
 
     for (PValue sideInput : sideInputs) {
       TupleTag tag = userGraphContext.findTupleTag(sideInput);
-      bolt.addExecutor(tag, executor);
+      bolt.addExecutor(tag, executor, userGraphContext.getStepName());
       checkState(!bolt.getOutputTags().contains(tag));
       addStormStreamDef(
           TaggedPValue.of(tag, sideInput), name, Stream.Grouping.of(Stream.Grouping.Type.ALL));
@@ -304,6 +304,15 @@ public class TranslationContext {
       return pValueToTupleTag.get(checkNotNull(pValue, "pValue"));
     }
 
+    public PValue findPValue(TupleTag tupleTag) {
+      for (Map.Entry<PValue, TupleTag> entry : pValueToTupleTag.entrySet()) {
+        if (entry.getValue().equals(tupleTag)) {
+            return entry.getKey();
+        }
+      }
+      return null;
+    }
+
     public void setWindowed() {
       this.isWindowed = true;
     }
@@ -361,6 +370,10 @@ public class TranslationContext {
       return this.spoutMap;
     }
 
+    public Map<String, ExecutorsBolt> getBolts() {
+      return this.boltMap;
+    }
+
     public String registerBolt(ExecutorsBolt bolt) {
       checkNotNull(bolt, "bolt");
       String name = "bolt" + genId();

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
index dab9518..4ae28e6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
@@ -27,6 +27,7 @@ import com.alibaba.jstorm.utils.KryoSerializer;
 import java.io.IOException;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
+
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
@@ -45,6 +46,7 @@ import org.slf4j.LoggerFactory;
 public class UnboundedSourceSpout extends AbstractComponent implements IRichSpout {
   private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
 
+  private final String name;
   private final String description;
   private final UnboundedSource source;
   private final SerializedPipelineOptions serializedOptions;
@@ -62,10 +64,12 @@ public class UnboundedSourceSpout extends AbstractComponent implements IRichSpou
   private long lastWaterMark = 0L;
 
   public UnboundedSourceSpout(
+      String name,
       String description,
       UnboundedSource source,
       JStormPipelineOptions options,
       TupleTag<?> outputTag) {
+    this.name = name;
     this.description = checkNotNull(description, "description");
     this.source = checkNotNull(source, "source");
     this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
@@ -174,6 +178,14 @@ public class UnboundedSourceSpout extends AbstractComponent implements IRichSpou
     }
   }
 
+  public String getName() {
+    return name;
+  }
+
+  public TupleTag getOutputTag() {
+    return outputTag;
+  }
+
   public UnboundedSource getUnboundedSource() {
     return source;
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/6078cbc6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
index 54c9b94..7cf2469 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
@@ -37,6 +37,7 @@ class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbo
     PValue output = userGraphContext.getOutput();
 
     UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        userGraphContext.getStepName(),
         description,
         transform.getSource(), userGraphContext.getOptions(), tag);
     context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));


[44/53] [abbrv] beam git commit: jstorm-runner: add missing "apache license" header for some files.

Posted by pe...@apache.org.
jstorm-runner: add missing "apache license" header for some files.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/61e9fa65
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/61e9fa65
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/61e9fa65

Branch: refs/heads/jstorm-runner
Commit: 61e9fa6581377dcb50edb4e4b48bac353cb3ba0d
Parents: e182cf7
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Mon Jul 24 13:00:37 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/TestJStormRunner.java     | 17 +++++++++++++++++
 .../serialization/CollectionsSerializer.java      | 17 +++++++++++++++++
 .../serialization/ImmutableListSerializer.java    | 17 +++++++++++++++++
 .../serialization/ImmutableMapSerializer.java     | 17 +++++++++++++++++
 .../serialization/ImmutableSetSerializer.java     | 17 +++++++++++++++++
 .../serialization/KvStoreIterableSerializer.java  | 17 +++++++++++++++++
 .../SdkRepackImmuListSerializer.java              | 18 +++++++++++++++++-
 .../serialization/SdkRepackImmuSetSerializer.java | 17 +++++++++++++++++
 .../SdkRepackImmutableMapSerializer.java          | 17 +++++++++++++++++
 .../UnmodifiableCollectionsSerializer.java        | 17 +++++++++++++++++
 runners/jstorm/src/test/resources/logback.xml     | 14 ++++++++++++++
 11 files changed, 184 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index 3124da2..21a58e3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm;
 
 import static com.google.common.base.Preconditions.checkNotNull;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
index 0548196..1c8053e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/CollectionsSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
index 268774c..215ccf1 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
index 6b998fc..6fe3f59 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
index edc7b09..625a32c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
index 3835816..44a5378 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import com.alibaba.jstorm.cache.KvStoreIterable;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
index f1ed644..e4e0e12 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
@@ -1,6 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
-
 import backtype.storm.Config;
 import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
 import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
index d1ed046..3cb60e8 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
index 546538a..1a0d902 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmutableMapSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
index 33343fc..615ac8b 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;

http://git-wip-us.apache.org/repos/asf/beam/blob/61e9fa65/runners/jstorm/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/resources/logback.xml b/runners/jstorm/src/test/resources/logback.xml
index 635933c..1e63106 100644
--- a/runners/jstorm/src/test/resources/logback.xml
+++ b/runners/jstorm/src/test/resources/logback.xml
@@ -1,4 +1,18 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+       http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
 <configuration scan="true" scanPeriod="30 seconds">
     <appender name="A1" class="ch.qos.logback.core.ConsoleAppender">
         <encoder>


[22/53] [abbrv] beam git commit: jstorm-runner: move most classes to translation package and reduece their visibility to package private.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
deleted file mode 100644
index a26472c..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JStorm {@link Executor} for {@link DoFn} with multi-output.
- * @param <InputT>
- * @param <OutputT>
- */
-public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
-  private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
-
-  /**
-   * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated
-   * tag is used in downstream consumer. So before output, we need to map this "local" tag to
-   * "external" tag. See PCollectionTuple for details.
-   */
-  public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
-    @Override
-    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      if (localTupleTagMap.containsKey(tag)) {
-        executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
-      } else {
-        executorsBolt.processExecutorElem(tag, output);
-      }
-    }
-  }
-
-  protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
-
-  public MultiOutputDoFnExecutor(
-      String stepName,
-      String description,
-      JStormPipelineOptions pipelineOptions,
-      DoFn<InputT, OutputT> doFn,
-      Coder<WindowedValue<InputT>> inputCoder,
-      WindowingStrategy<?, ?> windowingStrategy,
-      TupleTag<InputT> mainInputTag,
-      Collection<PCollectionView<?>> sideInputs,
-      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-      TupleTag<OutputT> mainTupleTag,
-      List<TupleTag<?>> sideOutputTags,
-      Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
-  ) {
-    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
-        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-    this.localTupleTagMap = localTupleTagMap;
-    this.outputManager = new MultiOutputDoFnExecutorOutputManager();
-    LOG.info("localTupleTagMap: {}", localTupleTagMap);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
deleted file mode 100644
index 5e87cff..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-
-/**
- * JStorm {@link Executor} for stateful {@link DoFn} with multi-output.
- * @param <OutputT>
- */
-public class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
-
-  public MultiStatefulDoFnExecutor(
-      String stepName, String description,
-      JStormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
-      Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
-      TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
-      Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
-      List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
-    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
-        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
-  }
-
-  @Override
-  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-    if (mainInputTag.equals(tag)) {
-      WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-      stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-          executorContext.getExecutorsBolt().timerService()));
-      stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-          kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-      processMainInput(elem);
-    } else {
-      processSideInput(tag, elem);
-    }
-  }
-
-  @Override
-  public void onTimer(Object key, TimerInternals.TimerData timerData) {
-    stepContext.setStateInternals(new JStormStateInternals<>(key,
-        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-    super.onTimer(key, timerData);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
deleted file mode 100644
index 77ae844..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-
-/**
- * JStorm {@link Executor} for stateful {@link DoFn}.
- * @param <OutputT>
- */
-public class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
-  public StatefulDoFnExecutor(
-      String stepName, String description, JStormPipelineOptions pipelineOptions,
-      DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
-      WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
-      Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
-          sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
-        mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-  }
-
-  @Override
-  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-    if (mainInputTag.equals(tag)) {
-      WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-      stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-          executorContext.getExecutorsBolt().timerService()));
-      stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-          kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-      processMainInput(elem);
-    } else {
-      processSideInput(tag, elem);
-    }
-  }
-
-  @Override
-  public void onTimer(Object key, TimerInternals.TimerData timerData) {
-    stepContext.setStateInternals(new JStormStateInternals<>(key,
-        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-    super.onTimer(key, timerData);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
deleted file mode 100644
index 5c41bda..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import java.io.Serializable;
-import java.util.List;
-import org.apache.beam.runners.core.TimerInternals;
-import org.joda.time.Instant;
-
-/**
- * Interface that tracks input watermarks and manages timers in each bolt.
- */
-public interface TimerService extends Serializable {
-
-  void init(List<Integer> upStreamTasks);
-
-  /**
-   *
-   * @param task
-   * @param inputWatermark
-   * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
-   */
-  long updateInputWatermark(Integer task, long inputWatermark);
-
-  long currentInputWatermark();
-
-  long currentOutputWatermark();
-
-  void clearWatermarkHold(String namespace);
-
-  void addWatermarkHold(String namespace, Instant watermarkHold);
-
-  void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
-
-  void fireTimers(long newWatermark);
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
deleted file mode 100644
index 0103095..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-
-import com.alibaba.jstorm.utils.Pair;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.joda.time.Instant;
-
-/**
- * Default implementation of {@link TimerService}.
- */
-public class TimerServiceImpl implements TimerService {
-  private transient ExecutorContext executorContext;
-  private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
-
-  private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark =
-      new ConcurrentHashMap<>();
-  private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
-  private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
-  private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
-  private final transient PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
-      new PriorityQueue<>();
-  private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
-      timerDataToKeyedExecutors = Maps.newHashMap();
-
-  private boolean initialized = false;
-
-  public TimerServiceImpl() {
-  }
-
-  public TimerServiceImpl(ExecutorContext executorContext) {
-    this.executorContext = executorContext;
-    this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
-  }
-
-  @Override
-  public void init(List<Integer> upStreamTasks) {
-    for (Integer task : upStreamTasks) {
-      upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-      inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-    }
-    initialized = true;
-  }
-
-  @Override
-  public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
-    checkState(initialized, "TimerService has not been initialized.");
-    Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
-    // Make sure the input watermark don't go backward.
-    if (taskInputWatermark > oldTaskInputWatermark) {
-      upStreamTaskToInputWatermark.put(task, taskInputWatermark);
-      inputWatermarks.add(taskInputWatermark);
-      inputWatermarks.remove(oldTaskInputWatermark);
-
-      long newLocalInputWatermark = currentInputWatermark();
-      if (newLocalInputWatermark > oldTaskInputWatermark) {
-        return newLocalInputWatermark;
-      }
-    }
-    return 0;
-  }
-
-  @Override
-  public void fireTimers(long newWatermark) {
-    TimerInternals.TimerData timerData;
-    while ((timerData = eventTimeTimersQueue.peek()) != null
-        && timerData.getTimestamp().getMillis() <= newWatermark) {
-      for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
-        DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
-        executor.onTimer(keyedExecutor.getSecond(), timerData);
-      }
-      eventTimeTimersQueue.remove();
-      timerDataToKeyedExecutors.remove(timerData);
-    }
-  }
-
-  @Override
-  public long currentInputWatermark() {
-    return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
-  }
-
-  @Override
-  public long currentOutputWatermark() {
-    if (watermarkHolds.isEmpty()) {
-      return currentInputWatermark();
-    } else {
-      return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
-    }
-  }
-
-  @Override
-  public void clearWatermarkHold(String namespace) {
-    Instant currentHold = namespaceToWatermarkHold.get(namespace);
-    if (currentHold != null) {
-      watermarkHolds.remove(currentHold);
-      namespaceToWatermarkHold.remove(namespace);
-    }
-  }
-
-  @Override
-  public void addWatermarkHold(String namespace, Instant watermarkHold) {
-    Instant currentHold = namespaceToWatermarkHold.get(namespace);
-    if (currentHold == null) {
-      namespaceToWatermarkHold.put(namespace, watermarkHold);
-      watermarkHolds.add(watermarkHold);
-    } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
-      namespaceToWatermarkHold.put(namespace, watermarkHold);
-      watermarkHolds.add(watermarkHold);
-      watermarkHolds.remove(currentHold);
-    }
-  }
-
-  @Override
-  public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
-    checkArgument(
-        TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
-        String.format("Does not support domain: %s.", timerData.getDomain()));
-    Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
-    if (keyedExecutors == null) {
-      keyedExecutors = Sets.newHashSet();
-      eventTimeTimersQueue.add(timerData);
-    }
-    keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
-    timerDataToKeyedExecutors.put(timerData, keyedExecutors);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
deleted file mode 100644
index 8dc51b5..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.transactional.bolt.ITransactionStatefulBoltExecutor;
-import java.io.IOException;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Transactional executors bolt handles the checkpoint and restore of state and timer.
- */
-public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
-  private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
-
-  private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
-  private static final String TIMER_SERVICE_KET = "timer_service_key";
-
-  private ExecutorsBolt executorsBolt;
-  private IKvStoreManager kvStoreManager;
-  private IKvStore<String, TimerService> timerServiceStore;
-
-  public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
-    this.executorsBolt = executorsBolt;
-    this.executorsBolt.setStatefulBolt(true);
-  }
-
-  @Override
-  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-    try {
-      executorsBolt.prepare(stormConf, context, collector);
-      kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
-      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-    } catch (IOException e) {
-      LOG.error("Failed to prepare stateful bolt", e);
-      throw new RuntimeException(e.getMessage());
-    }
-  }
-
-  @Override
-  public void execute(Tuple input) {
-    executorsBolt.execute(input);
-  }
-
-  @Override
-  public void cleanup() {
-    executorsBolt.cleanup();
-  }
-
-  @Override
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-    executorsBolt.declareOutputFields(declarer);
-  }
-
-  @Override
-  public Map<String, Object> getComponentConfiguration() {
-    return executorsBolt.getComponentConfiguration();
-  }
-
-  @Override
-  public void initState(Object userState) {
-    LOG.info("Begin to init from state: {}", userState);
-    restore(userState);
-  }
-
-  @Override
-  public Object finishBatch(long batchId) {
-    try {
-      timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
-    } catch (IOException e) {
-      LOG.error("Failed to store current timer service status", e);
-      throw new RuntimeException(e.getMessage());
-    }
-    kvStoreManager.checkpoint(batchId);
-    return null;
-  }
-
-  @Override
-  public Object commit(long batchId, Object state) {
-    return kvStoreManager.backup(batchId);
-  }
-
-  @Override
-  public void rollBack(Object userState) {
-    LOG.info("Begin to rollback from state: {}", userState);
-    restore(userState);
-  }
-
-  @Override
-  public void ackCommit(long batchId, long timeStamp) {
-    kvStoreManager.remove(batchId);
-  }
-
-  private void restore(Object userState) {
-    try {
-      // restore all states
-      kvStoreManager.restore(userState);
-
-      // init timer service
-      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-      TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
-      if (timerService == null) {
-        timerService = executorsBolt.initTimerService();
-      }
-      executorsBolt.setTimerService(timerService);
-    } catch (IOException e) {
-      LOG.error("Failed to restore state", e);
-      throw new RuntimeException(e.getMessage());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
deleted file mode 100644
index 48b410f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.transactional.spout.ITransactionSpoutExecutor;
-import java.io.IOException;
-import java.util.Map;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.slf4j.LoggerFactory;
-
-/**
- * Transactional unbounded source spout handles the checkpoint and restore of state and timer.
- */
-public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
-  private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
-
-  private static final String SOURCE_STORE_ID = "SourceCheckpoint";
-  private static final String CHECKPOINT_MARK = "CheckpointMark";
-
-  private UnboundedSourceSpout sourceSpout;
-  private UnboundedSource.UnboundedReader reader;
-  private IKvStoreManager kvStoreManager;
-  private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
-
-  public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
-    this.sourceSpout = sourceSpout;
-  }
-
-  private void restore(Object userState) {
-    try {
-      kvStoreManager.restore(userState);
-      sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
-      UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
-      sourceSpout.createSourceReader(checkpointMark);
-      reader = sourceSpout.getUnboundedSourceReader();
-    } catch (IOException e) {
-      LOG.error("Failed to init state", e);
-      throw new RuntimeException(e.getMessage());
-    }
-  }
-
-  @Override
-  public void initState(Object userState) {
-    restore(userState);
-  }
-
-  @Override
-  public Object finishBatch(long checkpointId) {
-    try {
-      // Store check point mark from unbounded source reader
-      UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
-      sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
-
-      // checkpoint all kv stores in current manager
-      kvStoreManager.checkpoint(checkpointId);
-    } catch (IOException e) {
-      LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
-      throw new RuntimeException(e.getMessage());
-    }
-    return null;
-  }
-
-  @Override
-  public Object commit(long batchId, Object state) {
-    // backup kv stores to remote state backend
-    return kvStoreManager.backup(batchId);
-  }
-
-  @Override
-  public void rollBack(Object userState) {
-    restore(userState);
-  }
-
-  @Override
-  public void ackCommit(long batchId, long timeStamp) {
-    // remove obsolete state in bolt local and remote state backend
-    kvStoreManager.remove(batchId);
-  }
-
-  @Override
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-    sourceSpout.declareOutputFields(declarer);
-  }
-
-  @Override
-  public Map<String, Object> getComponentConfiguration() {
-    return sourceSpout.getComponentConfiguration();
-  }
-
-  @Override
-  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-    try {
-      sourceSpout.open(conf, context, collector);
-      String storeName = String.format("task-%s", context.getThisTaskId());
-      String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-      kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(
-          context, storeName, storePath, true);
-
-      reader = sourceSpout.getUnboundedSourceReader();
-    } catch (IOException e) {
-      LOG.error("Failed to open transactional unbounded source spout", e);
-      throw new RuntimeException(e.getMessage());
-    }
-  }
-
-  @Override
-  public void close() {
-    sourceSpout.close();
-  }
-
-  @Override
-  public void activate() {
-    sourceSpout.activate();
-  }
-
-  @Override
-  public void deactivate() {
-    sourceSpout.deactivate();
-  }
-
-  @Override
-  public void nextTuple() {
-    sourceSpout.nextTuple();
-  }
-
-  @Override
-  public void ack(Object msgId) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void fail(Object msgId) {
-    throw new UnsupportedOperationException();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
deleted file mode 100644
index 690824d..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.tuple.Values;
-import com.alibaba.jstorm.utils.KryoSerializer;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Spout implementation that wraps a Beam UnboundedSource.
- * TODO: add wrapper to support metrics in UnboundedSource.
- */
-public class UnboundedSourceSpout extends AbstractComponent implements IRichSpout {
-  private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
-
-  private final String description;
-  private final UnboundedSource source;
-  private final SerializedPipelineOptions serializedOptions;
-  private final TupleTag<?> outputTag;
-
-  private transient JStormPipelineOptions pipelineOptions;
-  private transient UnboundedSource.UnboundedReader reader;
-  private transient SpoutOutputCollector collector;
-
-  private volatile boolean hasNextRecord;
-  private AtomicBoolean activated = new AtomicBoolean();
-
-  private KryoSerializer<WindowedValue> serializer;
-
-  private long lastWaterMark = 0L;
-
-  public UnboundedSourceSpout(
-      String description,
-      UnboundedSource source,
-      JStormPipelineOptions options,
-      TupleTag<?> outputTag) {
-    this.description = checkNotNull(description, "description");
-    this.source = checkNotNull(source, "source");
-    this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
-    this.outputTag = checkNotNull(outputTag, "outputTag");
-  }
-
-  @Override
-  public synchronized void close() {
-    try {
-      activated.set(false);
-      this.reader.close();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Override
-  public void activate() {
-    activated.set(true);
-
-  }
-
-  @Override
-  public void deactivate() {
-    activated.set(false);
-  }
-
-  @Override
-  public void ack(Object msgId) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void fail(Object msgId) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public Map<String, Object> getComponentConfiguration() {
-    return null;
-  }
-
-  @Override
-  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-    try {
-      this.collector = collector;
-      this.pipelineOptions =
-          this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
-
-      createSourceReader(null);
-
-      this.serializer = new KryoSerializer<>(conf);
-    } catch (IOException e) {
-      throw new RuntimeException("Unable to create unbounded reader.", e);
-    }
-  }
-
-  public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
-    if (reader != null) {
-      reader.close();
-    }
-    reader = this.source.createReader(this.pipelineOptions, checkpointMark);
-    hasNextRecord = this.reader.start();
-  }
-
-  @Override
-  public synchronized void nextTuple() {
-    if (!activated.get()) {
-      return;
-    }
-    try {
-      if (!hasNextRecord) {
-        hasNextRecord = reader.advance();
-      }
-
-      while (hasNextRecord && activated.get()) {
-        Object value = reader.getCurrent();
-        Instant timestamp = reader.getCurrentTimestamp();
-
-        WindowedValue wv =
-            WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
-        LOG.debug("Source output: " + wv.getValue());
-        if (keyedEmit(outputTag.getId())) {
-          KV kv = (KV) wv.getValue();
-          // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-          byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
-          collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
-        } else {
-          byte[] immutableValue = serializer.serialize(wv);
-          collector.emit(outputTag.getId(), new Values(immutableValue));
-        }
-
-        // move to next record
-        hasNextRecord = reader.advance();
-      }
-
-      Instant waterMark = reader.getWatermark();
-      if (waterMark != null && lastWaterMark < waterMark.getMillis()) {
-        lastWaterMark = waterMark.getMillis();
-        collector.flush();
-        collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
-        LOG.debug("Source output: WM-{}", waterMark.toDateTime());
-      }
-    } catch (IOException e) {
-      throw new RuntimeException("Exception reading values from source.", e);
-    }
-  }
-
-  public UnboundedSource getUnboundedSource() {
-    return source;
-  }
-
-  public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
-    return reader;
-  }
-
-  @Override
-  public String toString() {
-    return description;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
deleted file mode 100644
index 4320967..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * JStorm {@link Executor} for {@link View}.
- */
-public class ViewExecutor implements Executor {
-
-  private final String description;
-  private final TupleTag outputTag;
-  private ExecutorsBolt executorsBolt;
-
-  public ViewExecutor(String description, TupleTag outputTag) {
-    this.description = description;
-    this.outputTag = outputTag;
-  }
-
-  @Override
-  public void init(ExecutorContext context) {
-    this.executorsBolt = context.getExecutorsBolt();
-  }
-
-  @Override
-  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-    executorsBolt.processExecutorElem(outputTag, elem);
-  }
-
-  @Override
-  public void cleanup() {
-  }
-
-  @Override
-  public String toString() {
-    return description;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
deleted file mode 100644
index 3cd0aa9..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-import com.google.common.collect.Iterables;
-import java.util.Collection;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.windowing.Window.Assign}.
- * @param <T>
- * @param <W>
- */
-public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
-  private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
-
-  private final String description;
-  private WindowFn<T, W> windowFn;
-  private ExecutorsBolt executorsBolt;
-  private TupleTag outputTag;
-
-  class JStormAssignContext<InputT, W extends BoundedWindow>
-      extends WindowFn<InputT, W>.AssignContext {
-    private final WindowedValue<InputT> value;
-
-    JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
-      fn.super();
-      checkArgument(
-          Iterables.size(value.getWindows()) == 1,
-          String.format(
-              "%s passed to window assignment must be in a single window, but it was in %s: %s",
-              WindowedValue.class.getSimpleName(),
-              Iterables.size(value.getWindows()),
-              value.getWindows()));
-      this.value = value;
-    }
-
-    @Override
-    public InputT element() {
-      return value.getValue();
-    }
-
-    @Override
-    public Instant timestamp() {
-      return value.getTimestamp();
-    }
-
-    @Override
-    public BoundedWindow window() {
-      return Iterables.getOnlyElement(value.getWindows());
-    }
-  }
-
-  public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
-    this.description = description;
-    this.windowFn = windowFn;
-    this.outputTag = outputTag;
-  }
-
-  @Override
-  public void init(ExecutorContext context) {
-    this.executorsBolt = context.getExecutorsBolt();
-  }
-
-  @Override
-  public void process(TupleTag tag, WindowedValue elem) {
-    Collection<W> windows = null;
-    try {
-      windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
-      for (W window : windows) {
-        executorsBolt.processExecutorElem(
-            outputTag,
-            WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
-      }
-    } catch (Exception e) {
-      LOG.warn("Failed to assign windows for elem=" + elem, e);
-    }
-  }
-
-  @Override
-  public void cleanup() {
-  }
-
-
-  @Override
-  public String toString() {
-    return description;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
deleted file mode 100644
index df54383..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-import com.alibaba.jstorm.cache.KvStoreIterable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implementation of {@link BagState} in JStorm runner.
- */
-class JStormBagState<K, T> implements BagState<T> {
-  private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
-
-  @Nullable
-  private final K key;
-  private final StateNamespace namespace;
-  private final IKvStore<ComposedKey, T> kvState;
-  private final IKvStore<ComposedKey, Object> stateInfoKvState;
-  private int elemIndex;
-
-  public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
-                        IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
-    this.key = key;
-    this.namespace = checkNotNull(namespace, "namespace");
-    this.kvState = checkNotNull(kvState, "kvState");
-    this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
-
-    Integer index = (Integer) stateInfoKvState.get(getComposedKey());
-    this.elemIndex = index != null ? ++index : 0;
-  }
-
-  @Override
-  public void add(T input) {
-    try {
-      kvState.put(getComposedKey(elemIndex), input);
-      stateInfoKvState.put(getComposedKey(), elemIndex);
-      elemIndex++;
-    } catch (IOException e) {
-      throw new RuntimeException(e.getCause());
-    }
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return new ReadableState<Boolean>() {
-      @Override
-      public Boolean read() {
-        return elemIndex <= 0;
-      }
-
-      @Override
-      public ReadableState<Boolean> readLater() {
-        // TODO: support prefetch.
-        return this;
-      }
-    };
-  }
-
-  @Override
-  public Iterable<T> read() {
-    return new BagStateIterable(elemIndex);
-  }
-
-  @Override
-  public BagState readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      for (int i = 0; i < elemIndex; i++) {
-        kvState.remove(getComposedKey(i));
-      }
-      stateInfoKvState.remove(getComposedKey());
-      elemIndex = 0;
-    } catch (IOException e) {
-      throw new RuntimeException(e.getCause());
-    }
-  }
-
-  private ComposedKey getComposedKey() {
-    return ComposedKey.of(key, namespace);
-  }
-
-  private ComposedKey getComposedKey(int elemIndex) {
-    return ComposedKey.of(key, namespace, elemIndex);
-  }
-
-  /**
-   * Implementation of Bag state Iterable.
-   */
-  private class BagStateIterable implements KvStoreIterable<T> {
-
-    private class BagStateIterator implements Iterator<T> {
-      private final int size;
-      private int cursor = 0;
-
-      BagStateIterator() {
-        Integer s = null;
-        try {
-          s = (Integer) stateInfoKvState.get(getComposedKey());
-        } catch (IOException e) {
-          LOG.error("Failed to get elemIndex for key={}", getComposedKey());
-        }
-        this.size = s != null ? ++s : 0;
-      }
-
-      @Override
-      public boolean hasNext() {
-        return cursor < size;
-      }
-
-      @Override
-      public T next() {
-        if (cursor >= size) {
-          throw new NoSuchElementException();
-        }
-
-        T value = null;
-        try {
-          value = kvState.get(getComposedKey(cursor));
-        } catch (IOException e) {
-          LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
-        }
-        cursor++;
-        return value;
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException();
-      }
-    }
-
-    private final int size;
-
-    BagStateIterable(int size) {
-      this.size = size;
-    }
-
-    @Override
-    public Iterator<T> iterator() {
-      return new BagStateIterator();
-    }
-
-    @Override
-    public String toString() {
-      return String.format("BagStateIterable: composedKey=%s", getComposedKey());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
deleted file mode 100644
index 7c6a239..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import javax.annotation.Nullable;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.transforms.Combine;
-
-/**
- * JStorm implementation of {@link CombiningState}.
- */
-public class JStormCombiningState<InputT, AccumT, OutputT>
-    implements CombiningState<InputT, AccumT, OutputT> {
-
-  @Nullable
-  private final BagState<AccumT> accumBagState;
-  private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
-
-  JStormCombiningState(
-      BagState<AccumT> accumBagState,
-      Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-    this.accumBagState = checkNotNull(accumBagState, "accumBagState");
-    this.combineFn = checkNotNull(combineFn, "combineFn");
-  }
-
-  @Override
-  public AccumT getAccum() {
-    // TODO: replacing the accumBagState with the merged accum.
-    return combineFn.mergeAccumulators(accumBagState.read());
-  }
-
-  @Override
-  public void addAccum(AccumT accumT) {
-    accumBagState.add(accumT);
-  }
-
-  @Override
-  public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
-    return combineFn.mergeAccumulators(iterable);
-  }
-
-  @Override
-  public void add(InputT input) {
-    accumBagState.add(
-        combineFn.addInput(combineFn.createAccumulator(), input));
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return accumBagState.isEmpty();
-  }
-
-  @Override
-  public OutputT read() {
-    return combineFn.extractOutput(
-        combineFn.mergeAccumulators(accumBagState.read()));
-  }
-
-  @Override
-  public CombiningState<InputT, AccumT, OutputT> readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    accumBagState.clear();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
deleted file mode 100644
index ac3f91f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import com.alibaba.jstorm.cache.IKvStore;
-import java.io.IOException;
-import java.util.Map;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Implementation of {@link MapState} in JStorm runner.
- * @param <K>
- * @param <V>
- */
-public class JStormMapState<K, V> implements MapState<K, V> {
-  private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
-
-  private final K key;
-  private final StateNamespace namespace;
-  private IKvStore<K, V> kvStore;
-
-  public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
-    this.key = key;
-    this.namespace = namespace;
-    this.kvStore = kvStore;
-  }
-
-  @Override
-  public void put(K var1, V var2) {
-    try {
-      kvStore.put(var1, var2);
-    } catch (IOException e) {
-      reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
-    }
-  }
-
-  @Override
-  public ReadableState<V> putIfAbsent(K var1, V var2) {
-    ReadableState<V> ret = null;
-    try {
-      V value = kvStore.get(var1);
-      if (value == null) {
-        kvStore.put(var1, var2);
-        ret = new MapReadableState<>(null);
-      } else {
-        ret = new MapReadableState<>(value);
-      }
-    } catch (IOException e) {
-      reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public void remove(K var1) {
-    try {
-      kvStore.remove(var1);
-    } catch (IOException e) {
-      reportError(String.format("Failed to remove key=%s", var1), e);
-    }
-  }
-
-  @Override
-  public ReadableState<V> get(K var1) {
-    ReadableState<V> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState(kvStore.get(var1));
-    } catch (IOException e) {
-      reportError(String.format("Failed to get value for key=%s", var1), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<K>> keys() {
-    ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.keys());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get keys"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<V>> values() {
-    ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.values());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get values"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
-    ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
-    try {
-      ret = new MapReadableState<>(kvStore.entries());
-    } catch (IOException e) {
-      reportError(String.format("Failed to get values"), e);
-    }
-    return ret;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      Iterable<K> keys = kvStore.keys();
-      kvStore.removeBatch(keys);
-    } catch (IOException e) {
-      reportError(String.format("Failed to clear map state"), e);
-    }
-  }
-
-  private void reportError(String errorInfo, IOException e) {
-    LOG.error(errorInfo, e);
-    throw new RuntimeException(errorInfo);
-  }
-
-  private class MapReadableState<T> implements ReadableState<T> {
-    private T value;
-
-    public MapReadableState(T value) {
-      this.value = value;
-    }
-
-    @Override
-    public T read() {
-      return value;
-    }
-
-    @Override
-    public ReadableState<T> readLater() {
-      return this;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
deleted file mode 100644
index 80ef3a2..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import java.io.IOException;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateTag;
-import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.SetState;
-import org.apache.beam.sdk.state.State;
-import org.apache.beam.sdk.state.StateBinder;
-import org.apache.beam.sdk.state.StateContext;
-import org.apache.beam.sdk.state.StateSpec;
-import org.apache.beam.sdk.state.ValueState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn;
-import org.apache.beam.sdk.transforms.CombineWithContext;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-
-/**
- * JStorm implementation of {@link StateInternals}.
- */
-public class JStormStateInternals<K> implements StateInternals {
-
-  private static final String STATE_INFO = "state-info:";
-
-  @Nullable
-  private final K key;
-  private final IKvStoreManager kvStoreManager;
-  private final TimerService timerService;
-  private final int executorId;
-
-  public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
-                              TimerService timerService, int executorId) {
-    this.key = key;
-    this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
-    this.timerService = checkNotNull(timerService, "timerService");
-    this.executorId = executorId;
-  }
-
-  @Nullable
-  @Override
-  public K getKey() {
-    return key;
-  }
-
-  @Override
-  public <T extends State> T state(
-      StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
-    // throw new UnsupportedOperationException("StateContext is not supported.");
-    /**
-     * TODO:
-     * Same implementation as state() which is without StateContext. This might be updated after
-     * we figure out if we really need StateContext for JStorm state internals.
-     */
-    return state(namespace, address);
-  }
-
-  @Override
-  public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
-    return address.getSpec().bind(address.getId(), new StateBinder() {
-      @Override
-      public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
-        try {
-          return new JStormValueState<>(
-              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
-        } catch (IOException e) {
-          throw new RuntimeException();
-        }
-      }
-
-      @Override
-      public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
-        try {
-          return new JStormBagState(
-              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
-              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-        } catch (IOException e) {
-          throw new RuntimeException();
-        }
-      }
-
-      @Override
-      public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
-          String id,
-          StateSpec<MapState<KeyT, ValueT>> spec,
-          Coder<KeyT> mapKeyCoder,
-          Coder<ValueT> mapValueCoder) {
-        try {
-          return new JStormMapState<>(
-              getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      }
-
-      @Override
-      public <InputT, AccumT, OutputT> CombiningState bindCombining(
-          String id,
-          StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
-          Coder<AccumT> accumCoder,
-          Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-        try {
-          BagState<AccumT> accumBagState = new JStormBagState(
-              getKey(), namespace,
-              kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
-              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-          return new JStormCombiningState<>(accumBagState, combineFn);
-        } catch (IOException e) {
-          throw new RuntimeException();
-        }
-      }
-
-
-      @Override
-      public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
-      bindCombiningWithContext(
-          String id,
-          StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
-          CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public WatermarkHoldState bindWatermark(
-          String id,
-          StateSpec<WatermarkHoldState> spec,
-          final TimestampCombiner timestampCombiner) {
-        try {
-          BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
-              getKey(), namespace,
-              kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
-              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-
-          Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
-              new BinaryCombineFn<Instant>() {
-                @Override
-                public Instant apply(Instant left, Instant right) {
-                  return timestampCombiner.combine(left, right);
-                }
-              };
-          return new JStormWatermarkHoldState(
-              namespace,
-              new JStormCombiningState<>(
-                  accumBagState,
-                  outputTimeCombineFn),
-              timestampCombiner,
-              timerService);
-        } catch (IOException e) {
-          throw new RuntimeException();
-        }
-      }
-    });
-  }
-
-  private String getStoreId(String stateId) {
-    return String.format("%s-%s", stateId, executorId);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
deleted file mode 100644
index 79ff6b4..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import com.alibaba.jstorm.cache.ComposedKey;
-import com.alibaba.jstorm.cache.IKvStore;
-import java.io.IOException;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.sdk.state.ValueState;
-
-/**
- * JStorm implementation of {@link ValueState}.
- */
-public class JStormValueState<K, T> implements ValueState<T> {
-
-  @Nullable
-  private final K key;
-  private final StateNamespace namespace;
-  private final IKvStore<ComposedKey, T> kvState;
-
-  JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
-    this.key = key;
-    this.namespace = namespace;
-    this.kvState = kvState;
-  }
-
-  @Override
-  public void write(T t) {
-    try {
-      kvState.put(getComposedKey(), t);
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
-    }
-  }
-
-  @Override
-  public T read() {
-    try {
-      return kvState.get(getComposedKey());
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to read key: %s, namespace: %s.", key, namespace));
-    }
-  }
-
-  @Override
-  public ValueState<T> readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    try {
-      kvState.remove(getComposedKey());
-    } catch (IOException e) {
-      throw new RuntimeException(String.format(
-          "Failed to clear key: %s, namespace: %s.", key, namespace));
-    }
-  }
-
-  private ComposedKey getComposedKey() {
-    return ComposedKey.of(key, namespace);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
deleted file mode 100644
index dc3ba43..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
-import org.apache.beam.sdk.state.GroupingState;
-import org.apache.beam.sdk.state.ReadableState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-
-/**
- * JStorm implementation of {@link WatermarkHoldState}.
- */
-public class JStormWatermarkHoldState implements WatermarkHoldState {
-
-  private final StateNamespace namespace;
-  private final GroupingState<Instant, Instant> watermarkHoldsState;
-  private final TimestampCombiner timestampCombiner;
-  private final TimerService timerService;
-
-  JStormWatermarkHoldState(
-      StateNamespace namespace,
-      GroupingState<Instant, Instant> watermarkHoldsState,
-      TimestampCombiner timestampCombiner,
-      TimerService timerService) {
-    this.namespace = checkNotNull(namespace, "namespace");
-    this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
-    this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
-    this.timerService = checkNotNull(timerService, "timerService");
-  }
-
-  @Override
-  public TimestampCombiner getTimestampCombiner() {
-    return timestampCombiner;
-  }
-
-  @Override
-  public void add(Instant instant) {
-    timerService.addWatermarkHold(namespace.stringKey(), instant);
-    watermarkHoldsState.add(instant);
-  }
-
-  @Override
-  public ReadableState<Boolean> isEmpty() {
-    return watermarkHoldsState.isEmpty();
-  }
-
-  @Override
-  public Instant read() {
-    return watermarkHoldsState.read();
-  }
-
-  @Override
-  public WatermarkHoldState readLater() {
-    // TODO: support prefetch.
-    return this;
-  }
-
-  @Override
-  public void clear() {
-    timerService.clearWatermarkHold(namespace.stringKey());
-    watermarkHoldsState.clear();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
deleted file mode 100644
index 184a957..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.timer;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.joda.time.Instant;
-
-/**
- * JStorm implementation of {@link TimerInternals}.
- */
-public class JStormTimerInternals<K> implements TimerInternals {
-
-  private final K key;
-  private final DoFnExecutor<?, ?> doFnExecutor;
-  private final TimerService timerService;
-
-
-  public JStormTimerInternals(
-      @Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
-    this.key = key;
-    this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
-    this.timerService = checkNotNull(timerService, "timerService");
-  }
-
-  @Override
-  public void setTimer(
-      StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
-    setTimer(TimerData.of(timerId, namespace, target, timeDomain));
-  }
-
-  @Override
-  @Deprecated
-  public void setTimer(TimerData timerData) {
-    timerService.setTimer(key, timerData, doFnExecutor);
-  }
-
-  @Override
-  public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
-    throw new UnsupportedOperationException(
-        "Canceling of a timer is not yet supported.");
-  }
-
-  @Override
-  @Deprecated
-  public void deleteTimer(StateNamespace namespace, String timerId) {
-    throw new UnsupportedOperationException(
-        "Canceling of a timer is not yet supported.");
-  }
-
-  @Override
-  @Deprecated
-  public void deleteTimer(TimerData timerData) {
-    throw new UnsupportedOperationException(
-        "Canceling of a timer is not yet supported.");
-  }
-
-  @Override
-  public Instant currentProcessingTime() {
-    return Instant.now();
-  }
-
-  @Override
-  @Nullable
-  public Instant currentSynchronizedProcessingTime() {
-    return null;
-  }
-
-  @Override
-  public Instant currentInputWatermarkTime() {
-    return new Instant(timerService.currentInputWatermark());
-  }
-
-  @Override
-  @Nullable
-  public Instant currentOutputWatermarkTime() {
-    return new Instant(timerService.currentOutputWatermark());
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
deleted file mode 100644
index 7e7a54a..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Translates a {@link Read.Bounded} into a Storm spout.
- *
- * @param <T>
- */
-public class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
-
-  @Override
-  public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    String description =
-        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-    TupleTag<?> outputTag = userGraphContext.getOutputTag();
-    PValue outputValue = userGraphContext.getOutput();
-    UnboundedSourceSpout spout = new UnboundedSourceSpout(
-        description,
-        new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
-        userGraphContext.getOptions(), outputTag);
-
-    context.getExecutionGraphContext().registerSpout(
-        spout, TaggedPValue.of(outputTag, outputValue));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
deleted file mode 100644
index 44ce8d8..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import com.google.common.collect.Maps;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.FlattenExecutor;
-import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Translates a {@link Flatten} to a JStorm {@link FlattenExecutor}.
- * @param <V>
- */
-public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
-
-  @Override
-  public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-
-    // Since a new tag is created in PCollectionList, retrieve the real tag here.
-    Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
-    for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
-      PCollection<V> pc = (PCollection<V>) entry.getValue();
-      inputs.putAll(pc.expand());
-    }
-    System.out.println("Real inputs: " + inputs);
-    System.out.println("FlattenList inputs: " + userGraphContext.getInputs());
-    String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
-    FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
-    context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
deleted file mode 100644
index 85cb85d..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import com.google.common.collect.Lists;
-import java.util.Collections;
-import java.util.List;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
-import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-
-/**
- * Translates a {@link GroupByKey} to a JStorm {@link GroupByWindowExecutor}.
- * @param <K>
- * @param <V>
- */
-public class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
-  // information of transform
-  protected PCollection<KV<K, V>> input;
-  protected PCollection<KV<K, Iterable<V>>> output;
-  protected List<TupleTag<?>> inputTags;
-  protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
-  protected List<TupleTag<?>> sideOutputTags;
-  protected List<PCollectionView<?>> sideInputs;
-  protected WindowingStrategy<?, ?> windowingStrategy;
-
-  @Override
-  public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    String description =
-        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-    input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-    output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
-
-    inputTags = userGraphContext.getInputTags();
-    mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
-    sideOutputTags = Lists.newArrayList();
-
-    sideInputs = Collections.<PCollectionView<?>>emptyList();
-    windowingStrategy = input.getWindowingStrategy();
-
-    GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
-        userGraphContext.getStepName(),
-        description,
-        context,
-        context.getUserGraphContext().getOptions(),
-        windowingStrategy,
-        mainOutputTag,
-        sideOutputTags);
-    context.addTransformExecutor(groupByWindowExecutor);
-  }
-}


[30/53] [abbrv] beam git commit: jstorm-runner: support deleteTimer in JStormTimerInternals.

Posted by pe...@apache.org.
jstorm-runner: support deleteTimer in JStormTimerInternals.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/18198330
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/18198330
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/18198330

Branch: refs/heads/jstorm-runner
Commit: 18198330d42a13d3d8dd96cccdbd07ba077b9408
Parents: af5221c
Author: Pei He <pe...@apache.org>
Authored: Tue Jul 18 20:07:19 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 .../runners/jstorm/translation/JStormTimerInternals.java    | 3 +--
 .../beam/runners/jstorm/translation/TimerService.java       | 2 ++
 .../beam/runners/jstorm/translation/TimerServiceImpl.java   | 9 +++++++++
 3 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/18198330/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
index 4c96541..0e9ee35 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/JStormTimerInternals.java
@@ -69,8 +69,7 @@ class JStormTimerInternals<K> implements TimerInternals {
   @Override
   @Deprecated
   public void deleteTimer(TimerData timerData) {
-    throw new UnsupportedOperationException(
-        "Canceling of a timer is not yet supported.");
+    timerService.deleteTimer(timerData);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/18198330/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
index 29345aa..24a9050 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerService.java
@@ -48,4 +48,6 @@ interface TimerService extends Serializable {
   void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
 
   void fireTimers(long newWatermark);
+
+  void deleteTimer(TimerInternals.TimerData timerData);
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/18198330/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
index c2600e5..6b463db 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TimerServiceImpl.java
@@ -152,4 +152,13 @@ class TimerServiceImpl implements TimerService {
     keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
     timerDataToKeyedExecutors.put(timerData, keyedExecutors);
   }
+
+  @Override
+  public void deleteTimer(TimerInternals.TimerData timerData) {
+    checkArgument(
+        TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
+        String.format("Does not support domain: %s.", timerData.getDomain()));
+    eventTimeTimersQueue.remove(timerData);
+    timerDataToKeyedExecutors.remove(timerData);
+  }
 }


[15/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
jstorm-runner: fix checkstyles.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/aa251a4a
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/aa251a4a
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/aa251a4a

Branch: refs/heads/jstorm-runner
Commit: aa251a4a4d2850310f5dfd9db4d605cce41bba13
Parents: f3df3a2
Author: Pei He <pe...@apache.org>
Authored: Thu Jul 13 17:37:51 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:47 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/JStormRunner.java       | 395 +++++------
 .../runners/jstorm/JStormRunnerRegistrar.java   |  39 +-
 .../beam/runners/jstorm/JStormRunnerResult.java | 118 ++--
 .../beam/runners/jstorm/TestJStormRunner.java   | 188 +++---
 .../serialization/ImmutableListSerializer.java  | 152 +++--
 .../serialization/ImmutableMapSerializer.java   |  78 ++-
 .../serialization/ImmutableSetSerializer.java   |  93 +--
 .../KvStoreIterableSerializer.java              |  73 +-
 .../SdkRepackImmuListSerializer.java            | 116 ++--
 .../SdkRepackImmuSetSerializer.java             |  98 +--
 .../UnmodifiableCollectionsSerializer.java      | 290 ++++----
 .../translation/StormPipelineTranslator.java    | 273 ++++----
 .../jstorm/translation/TranslationContext.java  | 667 ++++++++++---------
 .../jstorm/translation/TranslatorRegistry.java  |  65 +-
 .../translation/runtime/AbstractComponent.java  |  66 +-
 .../translation/runtime/AdaptorBasicBolt.java   |   2 +-
 .../translation/runtime/AdaptorBasicSpout.java  |   2 +-
 .../translation/runtime/DoFnExecutor.java       | 511 +++++++-------
 .../runtime/DoFnRunnerWithMetrics.java          |   3 +-
 .../jstorm/translation/runtime/Executor.java    |  13 +-
 .../translation/runtime/ExecutorContext.java    |  15 +-
 .../translation/runtime/ExecutorsBolt.java      | 502 +++++++-------
 .../translation/runtime/FlattenExecutor.java    |  61 +-
 .../runtime/GroupByWindowExecutor.java          | 231 ++++---
 .../runtime/MultiOutputDoFnExecutor.java        |  79 ++-
 .../runtime/MultiStatefulDoFnExecutor.java      |  64 +-
 .../runtime/StatefulDoFnExecutor.java           |  63 +-
 .../translation/runtime/TimerService.java       |  37 +-
 .../translation/runtime/TimerServiceImpl.java   | 233 +++----
 .../translation/runtime/TxExecutorsBolt.java    | 193 +++---
 .../runtime/TxUnboundedSourceSpout.java         | 244 +++----
 .../runtime/UnboundedSourceSpout.java           | 288 ++++----
 .../translation/runtime/ViewExecutor.java       |  53 +-
 .../runtime/WindowAssignExecutor.java           | 130 ++--
 .../runtime/state/JStormBagState.java           | 261 ++++----
 .../runtime/state/JStormCombiningState.java     |  98 +--
 .../runtime/state/JStormMapState.java           | 227 ++++---
 .../runtime/state/JStormStateInternals.java     | 290 ++++----
 .../runtime/state/JStormValueState.java         |  92 ++-
 .../runtime/state/JStormWatermarkHoldState.java |  88 +--
 .../runtime/timer/JStormTimerInternals.java     | 143 ++--
 .../translator/BoundedSourceTranslator.java     |  29 +-
 .../translator/CombineGloballyTranslator.java   |   5 +-
 .../translator/CombinePerKeyTranslator.java     |   5 +-
 .../translator/FlattenTranslator.java           |  34 +-
 .../translator/GroupByKeyTranslator.java        |  71 +-
 .../translator/ParDoBoundMultiTranslator.java   | 143 ++--
 .../translator/ParDoBoundTranslator.java        | 128 ++--
 .../translator/ReshuffleTranslator.java         |   4 +-
 .../jstorm/translation/translator/Stream.java   | 109 +--
 .../translator/TransformTranslator.java         |  74 +-
 .../translator/UnboundedSourceTranslator.java   |  28 +-
 .../translation/translator/ViewTranslator.java  | 586 ++++++++--------
 .../translator/WindowAssignTranslator.java      |  26 +-
 .../translator/WindowBoundTranslator.java       |  26 +-
 .../jstorm/translation/util/CommonInstance.java |   6 +-
 .../util/DefaultSideInputReader.java            |  33 +-
 .../translation/util/DefaultStepContext.java    |  89 +--
 .../beam/runners/jstorm/util/RunnerUtils.java   |  46 +-
 .../jstorm/util/SerializedPipelineOptions.java  |  51 +-
 .../jstorm/util/SingletonKeyedWorkItem.java     |   3 +-
 .../jstorm/JStormRunnerRegistrarTest.java       |   4 +-
 .../runtime/state/JStormStateInternalsTest.java | 345 +++++-----
 63 files changed, 4314 insertions(+), 4165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
index 39c723b..5fdbe4d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunner.java
@@ -17,8 +17,6 @@
  */
 package org.apache.beam.runners.jstorm;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import backtype.storm.Config;
 import backtype.storm.LocalCluster;
 import backtype.storm.StormSubmitter;
@@ -31,8 +29,6 @@ import backtype.storm.tuple.Fields;
 import com.alibaba.jstorm.cache.KvStoreIterable;
 import com.alibaba.jstorm.cluster.StormConfig;
 import com.alibaba.jstorm.transactional.TransactionTopologyBuilder;
-import com.alibaba.jstorm.utils.JStormUtils;
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.beam.runners.jstorm.serialization.ImmutableListSerializer;
@@ -54,12 +50,9 @@ import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
 import org.apache.beam.runners.jstorm.translation.translator.Stream;
 import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
 import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.PipelineRunner;
-import org.apache.beam.sdk.metrics.MetricResults;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsValidator;
-import org.joda.time.Duration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -70,204 +63,218 @@ import org.slf4j.LoggerFactory;
 public class JStormRunner extends PipelineRunner<JStormRunnerResult> {
 
 
-    private static final Logger LOG = LoggerFactory.getLogger(JStormRunner.class);
-
-    private JStormPipelineOptions options;
-
-    public JStormRunner(JStormPipelineOptions options) {
-        this.options = options;
+  private static final Logger LOG = LoggerFactory.getLogger(JStormRunner.class);
+
+  private JStormPipelineOptions options;
+
+  public JStormRunner(JStormPipelineOptions options) {
+    this.options = options;
+  }
+
+  public static JStormRunner fromOptions(PipelineOptions options) {
+    JStormPipelineOptions pipelineOptions = PipelineOptionsValidator.validate(
+        JStormPipelineOptions.class, options);
+    return new JStormRunner(pipelineOptions);
+  }
+
+  /**
+   * convert pipeline options to storm configuration format
+   *
+   * @param options
+   * @return
+   */
+  private Config convertPipelineOptionsToConfig(JStormPipelineOptions options) {
+    Config config = new Config();
+    if (options.getLocalMode())
+      config.put(Config.STORM_CLUSTER_MODE, "local");
+    else
+      config.put(Config.STORM_CLUSTER_MODE, "distributed");
+
+    Config.setNumWorkers(config, options.getWorkerNumber());
+
+    config.putAll(options.getTopologyConfig());
+
+    // Setup config for runtime env
+    config.put("worker.external", "beam");
+    config.put("topology.acker.executors", 0);
+
+    UnmodifiableCollectionsSerializer.registerSerializers(config);
+    // register classes of guava utils, ImmutableList, ImmutableSet, ImmutableMap
+    ImmutableListSerializer.registerSerializers(config);
+    SdkRepackImmuListSerializer.registerSerializers(config);
+    ImmutableSetSerializer.registerSerializers(config);
+    SdkRepackImmuSetSerializer.registerSerializers(config);
+    ImmutableMapSerializer.registerSerializers(config);
+
+    config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
+    return config;
+  }
+
+  @Override
+  public JStormRunnerResult run(Pipeline pipeline) {
+    LOG.info("Running pipeline...");
+    TranslationContext context = new TranslationContext(this.options);
+    StormPipelineTranslator transformer = new StormPipelineTranslator(context);
+    transformer.translate(pipeline);
+    LOG.info("UserGraphContext=\n{}", context.getUserGraphContext());
+    LOG.info("ExecutionGraphContext=\n{}", context.getExecutionGraphContext());
+
+    for (Stream stream : context.getExecutionGraphContext().getStreams()) {
+      LOG.info(
+          stream.getProducer().getComponentId() + " --> " + stream.getConsumer().getComponentId());
     }
 
-    public static JStormRunner fromOptions(PipelineOptions options) {
-        JStormPipelineOptions pipelineOptions = PipelineOptionsValidator.validate(JStormPipelineOptions.class, options);
-        return new JStormRunner(pipelineOptions);
+    String topologyName = options.getJobName();
+    Config config = convertPipelineOptionsToConfig(options);
+
+    return runTopology(
+        topologyName,
+        getTopology(options, context.getExecutionGraphContext()),
+        config);
+  }
+
+  private JStormRunnerResult runTopology(
+      String topologyName,
+      StormTopology topology,
+      Config config) {
+    try {
+      if (StormConfig.local_mode(config)) {
+        LocalCluster localCluster = LocalCluster.getInstance();
+        localCluster.submitTopology(topologyName, config, topology);
+        return JStormRunnerResult.local(
+            topologyName, config, localCluster, options.getLocalModeExecuteTime());
+      } else {
+        StormSubmitter.submitTopology(topologyName, config, topology);
+        return null;
+      }
+    } catch (Exception e) {
+      LOG.warn("Fail to submit topology", e);
+      throw new RuntimeException("Fail to submit topology", e);
     }
-
-    /**
-     * convert pipeline options to storm configuration format
-     * @param options
-     * @return
-     */
-    private Config convertPipelineOptionsToConfig(JStormPipelineOptions options) {
-        Config config = new Config();
-        if (options.getLocalMode())
-            config.put(Config.STORM_CLUSTER_MODE, "local");
-        else
-            config.put(Config.STORM_CLUSTER_MODE, "distributed");
-
-        Config.setNumWorkers(config, options.getWorkerNumber());
-
-        config.putAll(options.getTopologyConfig());
-
-        // Setup config for runtime env
-        config.put("worker.external", "beam");
-        config.put("topology.acker.executors", 0);
-
-        UnmodifiableCollectionsSerializer.registerSerializers(config);
-        // register classes of guava utils, ImmutableList, ImmutableSet, ImmutableMap
-        ImmutableListSerializer.registerSerializers(config);
-        SdkRepackImmuListSerializer.registerSerializers(config);
-        ImmutableSetSerializer.registerSerializers(config);
-        SdkRepackImmuSetSerializer.registerSerializers(config);
-        ImmutableMapSerializer.registerSerializers(config);
-
-        config.registerDefaultSerailizer(KvStoreIterable.class, KvStoreIterableSerializer.class);
-        return config;
+  }
+
+  private AbstractComponent getComponent(
+      String id, TranslationContext.ExecutionGraphContext context) {
+    AbstractComponent component = null;
+    AdaptorBasicSpout spout = context.getSpout(id);
+    if (spout != null) {
+      component = spout;
+    } else {
+      AdaptorBasicBolt bolt = context.getBolt(id);
+      if (bolt != null)
+        component = bolt;
     }
 
-    @Override
-    public JStormRunnerResult run(Pipeline pipeline) {
-        LOG.info("Running pipeline...");
-        TranslationContext context = new TranslationContext(this.options);
-        StormPipelineTranslator transformer = new StormPipelineTranslator(context);
-        transformer.translate(pipeline);
-        LOG.info("UserGraphContext=\n{}", context.getUserGraphContext());
-        LOG.info("ExecutionGraphContext=\n{}", context.getExecutionGraphContext());
-
-        for (Stream stream : context.getExecutionGraphContext().getStreams()) {
-            LOG.info(stream.getProducer().getComponentId() + " --> " + stream.getConsumer().getComponentId());
-        }
+    return component;
+  }
 
-        String topologyName = options.getJobName();
-        Config config = convertPipelineOptionsToConfig(options);
+  private StormTopology getTopology(
+      JStormPipelineOptions options, TranslationContext.ExecutionGraphContext context) {
+    boolean isExactlyOnce = options.getExactlyOnceTopology();
+    TopologyBuilder builder =
+        isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
 
-        return runTopology(
-            topologyName,
-            getTopology(options, context.getExecutionGraphContext()),
-            config);
+    int parallelismNumber = options.getParallelismNumber();
+    Map<String, AdaptorBasicSpout> spouts = context.getSpouts();
+    for (String id : spouts.keySet()) {
+      IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
+      builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));
     }
 
-    private JStormRunnerResult runTopology(String topologyName, StormTopology topology, Config config) {
-        try {
-            if (StormConfig.local_mode(config)) {
-                LocalCluster localCluster = LocalCluster.getInstance();
-                localCluster.submitTopology(topologyName, config, topology);
-                return JStormRunnerResult.local(
-                    topologyName, config, localCluster, options.getLocalModeExecuteTime());
-            } else {
-                StormSubmitter.submitTopology(topologyName, config, topology);
-                return null;
-            }
-        } catch (Exception e) {
-            LOG.warn("Fail to submit topology", e);
-            throw new RuntimeException("Fail to submit topology", e);
-        }
+    HashMap<String, BoltDeclarer> declarers = new HashMap<>();
+    Iterable<Stream> streams = context.getStreams();
+    LOG.info("streams=" + streams);
+    for (Stream stream : streams) {
+      String destBoltId = stream.getConsumer().getComponentId();
+      IRichBolt bolt = getBolt(isExactlyOnce, context.getBolt(destBoltId));
+      BoltDeclarer declarer = declarers.get(destBoltId);
+      if (declarer == null) {
+        declarer = builder.setBolt(
+            destBoltId,
+            bolt,
+            getParallelismNum(context.getBolt(destBoltId), parallelismNumber));
+        declarers.put(destBoltId, declarer);
+      }
+
+      Stream.Grouping grouping = stream.getConsumer().getGrouping();
+      String streamId = stream.getProducer().getStreamId();
+      String srcBoltId = stream.getProducer().getComponentId();
+
+      // add stream output declare for "from" component
+      AbstractComponent component = getComponent(srcBoltId, context);
+      if (grouping.getType().equals(Stream.Grouping.Type.FIELDS))
+        component.addKVOutputField(streamId);
+      else
+        component.addOutputField(streamId);
+
+      // "to" component declares grouping to "from" component
+      switch (grouping.getType()) {
+        case SHUFFLE:
+          declarer.shuffleGrouping(srcBoltId, streamId);
+          break;
+        case FIELDS:
+          declarer.fieldsGrouping(srcBoltId, streamId, new Fields(grouping.getFields()));
+          break;
+        case ALL:
+          declarer.allGrouping(srcBoltId, streamId);
+          break;
+        case DIRECT:
+          declarer.directGrouping(srcBoltId, streamId);
+          break;
+        case GLOBAL:
+          declarer.globalGrouping(srcBoltId, streamId);
+          break;
+        case LOCAL_OR_SHUFFLE:
+          declarer.localOrShuffleGrouping(srcBoltId, streamId);
+          break;
+        case NONE:
+          declarer.noneGrouping(srcBoltId, streamId);
+          break;
+        default:
+          throw new UnsupportedOperationException("unsupported grouping type: " + grouping);
+      }
+
+      // Subscribe grouping of water mark stream
+      component.addOutputField(CommonInstance.BEAM_WATERMARK_STREAM_ID);
+      declarer.allGrouping(srcBoltId, CommonInstance.BEAM_WATERMARK_STREAM_ID);
     }
 
-    private AbstractComponent getComponent(String id, TranslationContext.ExecutionGraphContext context) {
-        AbstractComponent component = null;
-        AdaptorBasicSpout spout = context.getSpout(id);
-        if (spout != null) {
-            component = spout;
-        } else {
-            AdaptorBasicBolt bolt = context.getBolt(id);
-            if (bolt != null)
-                component = bolt;
-        }
-
-        return component;
+    if (isExactlyOnce) {
+      ((TransactionTopologyBuilder) builder).enableHdfs();
     }
-
-    private StormTopology getTopology(JStormPipelineOptions options, TranslationContext.ExecutionGraphContext context) {
-        boolean isExactlyOnce = options.getExactlyOnceTopology();
-        TopologyBuilder builder = isExactlyOnce ? new TransactionTopologyBuilder() : new TopologyBuilder();
-
-        int parallelismNumber = options.getParallelismNumber();
-        Map<String, AdaptorBasicSpout> spouts = context.getSpouts();
-        for (String id : spouts.keySet()) {
-            IRichSpout spout = getSpout(isExactlyOnce, spouts.get(id));
-            builder.setSpout(id, spout, getParallelismNum(spouts.get(id), parallelismNumber));
-        }
-
-        HashMap<String, BoltDeclarer> declarers = new HashMap<>();
-        Iterable<Stream> streams = context.getStreams();
-        LOG.info("streams=" + streams);
-        for (Stream stream : streams) {
-            String destBoltId = stream.getConsumer().getComponentId();
-            IRichBolt bolt = getBolt(isExactlyOnce, context.getBolt(destBoltId));
-            BoltDeclarer declarer = declarers.get(destBoltId);
-            if (declarer == null) {
-                declarer = builder.setBolt(destBoltId, bolt,
-                    getParallelismNum(context.getBolt(destBoltId), parallelismNumber));
-                declarers.put(destBoltId, declarer);
-            }
-
-            Stream.Grouping grouping = stream.getConsumer().getGrouping();
-            String streamId = stream.getProducer().getStreamId();
-            String srcBoltId = stream.getProducer().getComponentId();
-
-            // add stream output declare for "from" component
-            AbstractComponent component = getComponent(srcBoltId, context);
-            if (grouping.getType().equals(Stream.Grouping.Type.FIELDS))
-                component.addKVOutputField(streamId);
-            else
-                component.addOutputField(streamId);
-
-            // "to" component declares grouping to "from" component
-            switch (grouping.getType()) {
-                case SHUFFLE:
-                    declarer.shuffleGrouping(srcBoltId, streamId);
-                    break;
-                case FIELDS:
-                    declarer.fieldsGrouping(srcBoltId, streamId, new Fields(grouping.getFields()));
-                    break;
-                case ALL:
-                    declarer.allGrouping(srcBoltId, streamId);
-                    break;
-                case DIRECT:
-                    declarer.directGrouping(srcBoltId, streamId);
-                    break;
-                case GLOBAL:
-                    declarer.globalGrouping(srcBoltId, streamId);
-                    break;
-                case LOCAL_OR_SHUFFLE:
-                    declarer.localOrShuffleGrouping(srcBoltId, streamId);
-                    break;
-                case NONE:
-                    declarer.noneGrouping(srcBoltId, streamId);
-                    break;
-                default:
-                    throw new UnsupportedOperationException("unsupported grouping type: " + grouping);
-            }
-
-            // Subscribe grouping of water mark stream
-            component.addOutputField(CommonInstance.BEAM_WATERMARK_STREAM_ID);
-            declarer.allGrouping(srcBoltId, CommonInstance.BEAM_WATERMARK_STREAM_ID);
-        }
-
-        if (isExactlyOnce) {
-            ((TransactionTopologyBuilder) builder).enableHdfs();
-        }
-        return builder.createTopology();
-    }
-
-    private IRichSpout getSpout(boolean isExactlyOnce, IRichSpout spout) {
-        IRichSpout ret = null;
-        if (isExactlyOnce) {
-            if (spout instanceof UnboundedSourceSpout) {
-                ret = new TxUnboundedSourceSpout((UnboundedSourceSpout) spout);
-            } else {
-                String error = String.format("The specified type(%s) is not supported in exactly once mode yet!", spout.getClass().toString());
-                throw new RuntimeException(error);
-            }
-        } else {
-            ret = spout;
-        }
-        return ret;
-    }
-
-    private IRichBolt getBolt(boolean isExactlyOnce, ExecutorsBolt bolt) {
-        return isExactlyOnce ? new TxExecutorsBolt(bolt) : bolt;
-    }
-
-    /**
-     * Calculate the final parallelism number according to the configured number and global number.
-     * @param component
-     * @param globalParallelismNum
-     * @return final parallelism number for the specified component
-     */
-    private int getParallelismNum(AbstractComponent component, int globalParallelismNum) {
-        int configParallelismNum = component.getParallelismNum();
-        return configParallelismNum > 0 ? configParallelismNum : globalParallelismNum;
+    return builder.createTopology();
+  }
+
+  private IRichSpout getSpout(boolean isExactlyOnce, IRichSpout spout) {
+    IRichSpout ret = null;
+    if (isExactlyOnce) {
+      if (spout instanceof UnboundedSourceSpout) {
+        ret = new TxUnboundedSourceSpout((UnboundedSourceSpout) spout);
+      } else {
+        String error = String.format(
+            "The specified type(%s) is not supported in exactly once mode yet!",
+            spout.getClass().toString());
+        throw new RuntimeException(error);
+      }
+    } else {
+      ret = spout;
     }
+    return ret;
+  }
+
+  private IRichBolt getBolt(boolean isExactlyOnce, ExecutorsBolt bolt) {
+    return isExactlyOnce ? new TxExecutorsBolt(bolt) : bolt;
+  }
+
+  /**
+   * Calculate the final parallelism number according to the configured number and global number.
+   *
+   * @param component
+   * @param globalParallelismNum
+   * @return final parallelism number for the specified component
+   */
+  private int getParallelismNum(AbstractComponent component, int globalParallelismNum) {
+    int configParallelismNum = component.getParallelismNum();
+    return configParallelismNum > 0 ? configParallelismNum : globalParallelismNum;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrar.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrar.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrar.java
index 465236b..1b4d283 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrar.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerRegistrar.java
@@ -29,27 +29,28 @@ import org.apache.beam.sdk.runners.PipelineRunnerRegistrar;
  * {@link JStormRunner}.
  */
 public class JStormRunnerRegistrar {
-    private JStormRunnerRegistrar() {}
+  private JStormRunnerRegistrar() {
+  }
 
-    /**
-     * Register the {@link JStormPipelineOptions}.
-     */
-    @AutoService(PipelineOptionsRegistrar.class)
-    public static class Options implements PipelineOptionsRegistrar {
-        @Override
-        public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
-            return ImmutableList.<Class<? extends PipelineOptions>> of(JStormPipelineOptions.class);
-        }
+  /**
+   * Register the {@link JStormPipelineOptions}.
+   */
+  @AutoService(PipelineOptionsRegistrar.class)
+  public static class Options implements PipelineOptionsRegistrar {
+    @Override
+    public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
+      return ImmutableList.<Class<? extends PipelineOptions>>of(JStormPipelineOptions.class);
     }
+  }
 
-    /**
-     * Register the {@link JStormRunner}.
-     */
-    @AutoService(PipelineRunnerRegistrar.class)
-    public static class Runner implements PipelineRunnerRegistrar {
-        @Override
-        public Iterable<Class<? extends PipelineRunner<?>>> getPipelineRunners() {
-            return ImmutableList.<Class<? extends PipelineRunner<?>>> of(JStormRunner.class);
-        }
+  /**
+   * Register the {@link JStormRunner}.
+   */
+  @AutoService(PipelineRunnerRegistrar.class)
+  public static class Runner implements PipelineRunnerRegistrar {
+    @Override
+    public Iterable<Class<? extends PipelineRunner<?>>> getPipelineRunners() {
+      return ImmutableList.<Class<? extends PipelineRunner<?>>>of(JStormRunner.class);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
index e15ee6d..797c899 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/JStormRunnerResult.java
@@ -33,76 +33,76 @@ import org.joda.time.Duration;
  */
 public abstract class JStormRunnerResult implements PipelineResult {
 
-    public static JStormRunnerResult local(
+  public static JStormRunnerResult local(
+      String topologyName,
+      Config config,
+      LocalCluster localCluster,
+      long localModeExecuteTimeSecs) {
+    return new LocalStormPipelineResult(
+        topologyName, config, localCluster, localModeExecuteTimeSecs);
+  }
+
+  private final String topologyName;
+  private final Config config;
+
+  JStormRunnerResult(String topologyName, Config config) {
+    this.config = checkNotNull(config, "config");
+    this.topologyName = checkNotNull(topologyName, "topologyName");
+  }
+
+  public State getState() {
+    return null;
+  }
+
+  public Config getConfig() {
+    return config;
+  }
+
+  public String getTopologyName() {
+    return topologyName;
+  }
+
+  private static class LocalStormPipelineResult extends JStormRunnerResult {
+
+    private LocalCluster localCluster;
+    private long localModeExecuteTimeSecs;
+
+    LocalStormPipelineResult(
         String topologyName,
         Config config,
         LocalCluster localCluster,
         long localModeExecuteTimeSecs) {
-        return new LocalStormPipelineResult(
-            topologyName, config, localCluster, localModeExecuteTimeSecs);
-    }
-
-    private final String topologyName;
-    private final Config config;
-
-    JStormRunnerResult(String topologyName, Config config) {
-        this.config = checkNotNull(config, "config");
-        this.topologyName = checkNotNull(topologyName, "topologyName");
+      super(topologyName, config);
+      this.localCluster = checkNotNull(localCluster, "localCluster");
     }
 
-    public State getState() {
-        return null;
+    @Override
+    public State cancel() throws IOException {
+      //localCluster.deactivate(getTopologyName());
+      localCluster.killTopology(getTopologyName());
+      localCluster.shutdown();
+      JStormUtils.sleepMs(1000);
+      return State.CANCELLED;
     }
 
-    public Config getConfig() {
-        return config;
+    @Override
+    public State waitUntilFinish(Duration duration) {
+      return waitUntilFinish();
     }
 
-    public String getTopologyName() {
-        return topologyName;
+    @Override
+    public State waitUntilFinish() {
+      JStormUtils.sleepMs(localModeExecuteTimeSecs * 1000);
+      try {
+        return cancel();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
     }
 
-    private static class LocalStormPipelineResult extends JStormRunnerResult {
-
-        private LocalCluster localCluster;
-        private long localModeExecuteTimeSecs;
-
-        LocalStormPipelineResult(
-            String topologyName,
-            Config config,
-            LocalCluster localCluster,
-            long localModeExecuteTimeSecs) {
-            super(topologyName, config);
-            this.localCluster = checkNotNull(localCluster, "localCluster");
-        }
-
-        @Override
-        public State cancel() throws IOException {
-          //localCluster.deactivate(getTopologyName());
-          localCluster.killTopology(getTopologyName());
-          localCluster.shutdown();
-          JStormUtils.sleepMs(1000);
-          return State.CANCELLED;
-        }
-
-        @Override
-        public State waitUntilFinish(Duration duration) {
-            return waitUntilFinish();
-        }
-
-        @Override
-        public State waitUntilFinish() {
-            JStormUtils.sleepMs(localModeExecuteTimeSecs * 1000);
-            try {
-                return cancel();
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-
-        @Override
-        public MetricResults metrics() {
-            return null;
-        }
+    @Override
+    public MetricResults metrics() {
+      return null;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index b7ff4eb..e27efc0 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -1,10 +1,19 @@
 package org.apache.beam.runners.jstorm;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 import avro.shaded.com.google.common.collect.Maps;
 import com.alibaba.jstorm.common.metric.AsmMetric;
-import com.alibaba.jstorm.metric.*;
+import com.alibaba.jstorm.metric.AsmMetricRegistry;
+import com.alibaba.jstorm.metric.AsmWindow;
+import com.alibaba.jstorm.metric.JStormMetrics;
+import com.alibaba.jstorm.metric.MetaType;
+import com.alibaba.jstorm.metric.MetricType;
 import com.alibaba.jstorm.utils.JStormUtils;
 import com.google.common.base.Optional;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineRunner;
 import org.apache.beam.sdk.options.PipelineOptions;
@@ -12,109 +21,106 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * Test JStorm runner.
  */
 public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
 
-    private static final Logger LOG = LoggerFactory.getLogger(TestJStormRunner.class);
-
-    public static TestJStormRunner fromOptions(PipelineOptions options) {
-        return new TestJStormRunner(options.as(JStormPipelineOptions.class));
+  private static final Logger LOG = LoggerFactory.getLogger(TestJStormRunner.class);
+
+  public static TestJStormRunner fromOptions(PipelineOptions options) {
+    return new TestJStormRunner(options.as(JStormPipelineOptions.class));
+  }
+
+  private final JStormRunner stormRunner;
+  private final JStormPipelineOptions options;
+
+  private TestJStormRunner(JStormPipelineOptions options) {
+    this.options = options;
+    Map conf = Maps.newHashMap();
+    //conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
+    options.setTopologyConfig(conf);
+    options.setLocalMode(true);
+    stormRunner = JStormRunner.fromOptions(checkNotNull(options, "options"));
+  }
+
+  @Override
+  public JStormRunnerResult run(Pipeline pipeline) {
+    JStormRunnerResult result = stormRunner.run(pipeline);
+
+    try {
+      int numberOfAssertions = PAssert.countAsserts(pipeline);
+
+      LOG.info("Running JStorm job {} with {} expected assertions.",
+               result.getTopologyName(), numberOfAssertions);
+      if (numberOfAssertions == 0) {
+        // If assert number is zero, wait 5 sec
+        JStormUtils.sleepMs(5000);
+        return result;
+      } else {
+        for (int i = 0; i < 40; ++i) {
+          Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
+          if (success.isPresent() && success.get()) {
+            return result;
+          } else if (success.isPresent() && !success.get()) {
+            throw new AssertionError("Failed assertion checks.");
+          } else {
+            JStormUtils.sleepMs(500);
+          }
+        }
+        LOG.info("Assertion checks timed out.");
+        throw new AssertionError("Assertion checks timed out.");
+      }
+    } finally {
+      clearPAssertCount();
+      cancel(result);
     }
+  }
 
-    private final JStormRunner stormRunner;
-    private final JStormPipelineOptions options;
-
-    private TestJStormRunner(JStormPipelineOptions options) {
-        this.options = options;
-        Map conf = Maps.newHashMap();
-        //conf.put(ConfigExtension.KV_STORE_TYPE, KvStoreManagerFactory.KvStoreType.memory.toString());
-        options.setTopologyConfig(conf);
-        options.setLocalMode(true);
-        stormRunner = JStormRunner.fromOptions(checkNotNull(options, "options"));
+  private Optional<Boolean> checkForPAssertSuccess(int expectedNumberOfAssertions) {
+    int successes = 0;
+    for (AsmMetric metric :
+        JStormMetrics.search(PAssert.SUCCESS_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
+      successes += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
     }
-
-    @Override
-    public JStormRunnerResult run(Pipeline pipeline) {
-        JStormRunnerResult result = stormRunner.run(pipeline);
-
-        try {
-            int numberOfAssertions = PAssert.countAsserts(pipeline);
-
-            LOG.info("Running JStorm job {} with {} expected assertions.", result.getTopologyName(), numberOfAssertions);
-            if(numberOfAssertions == 0) {
-                // If assert number is zero, wait 5 sec
-                JStormUtils.sleepMs(5000);
-                return result;
-            } else {
-                for (int i = 0; i < 40; ++i) {
-                    Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
-                    if (success.isPresent() && success.get()) {
-                        return result;
-                    } else if (success.isPresent() && !success.get()) {
-                        throw new AssertionError("Failed assertion checks.");
-                    } else {
-                        JStormUtils.sleepMs(500);
-                    }
-                }
-                LOG.info("Assertion checks timed out.");
-                throw new AssertionError("Assertion checks timed out.");
-            }
-        } finally {
-            clearPAssertCount();
-            cancel(result);
-        }
+    int failures = 0;
+    for (AsmMetric metric :
+        JStormMetrics.search(PAssert.FAILURE_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
+      failures += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
     }
 
-    private Optional<Boolean> checkForPAssertSuccess(int expectedNumberOfAssertions) {
-        int successes = 0;
-        for (AsmMetric metric : JStormMetrics.search(PAssert.SUCCESS_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
-            successes += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
-        }
-        int failures = 0;
-        for (AsmMetric metric : JStormMetrics.search(PAssert.FAILURE_COUNTER, MetaType.TASK, MetricType.COUNTER)) {
-            failures += ((Long) metric.getValue(AsmWindow.M1_WINDOW)).intValue();
-        }
-
-        if (failures > 0) {
-            LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                    successes, failures, expectedNumberOfAssertions);
-            return Optional.of(false);
-        } else if (successes >= expectedNumberOfAssertions) {
-            LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                    successes, failures, expectedNumberOfAssertions);
-            return Optional.of(true);
-        }
-
-        LOG.info("Found {} success, {} failures out of {} expected assertions.",
-                successes, failures, expectedNumberOfAssertions);
-        return Optional.absent();
+    if (failures > 0) {
+      LOG.info("Found {} success, {} failures out of {} expected assertions.",
+               successes, failures, expectedNumberOfAssertions);
+      return Optional.of(false);
+    } else if (successes >= expectedNumberOfAssertions) {
+      LOG.info("Found {} success, {} failures out of {} expected assertions.",
+               successes, failures, expectedNumberOfAssertions);
+      return Optional.of(true);
     }
 
-    private void clearPAssertCount() {
-        String topologyName = options.getJobName();
-        AsmMetricRegistry taskMetrics = JStormMetrics.getTaskMetrics();
-        Iterator<Map.Entry<String, AsmMetric>> itr = taskMetrics.getMetrics().entrySet().iterator();
-        while (itr.hasNext()) {
-            Map.Entry<String, AsmMetric> metric = itr.next();
-            if (metric.getKey().contains(topologyName)) {
-                itr.remove();
-            }
-        }
+    LOG.info("Found {} success, {} failures out of {} expected assertions.",
+             successes, failures, expectedNumberOfAssertions);
+    return Optional.absent();
+  }
+
+  private void clearPAssertCount() {
+    String topologyName = options.getJobName();
+    AsmMetricRegistry taskMetrics = JStormMetrics.getTaskMetrics();
+    Iterator<Map.Entry<String, AsmMetric>> itr = taskMetrics.getMetrics().entrySet().iterator();
+    while (itr.hasNext()) {
+      Map.Entry<String, AsmMetric> metric = itr.next();
+      if (metric.getKey().contains(topologyName)) {
+        itr.remove();
+      }
     }
+  }
 
-    private void cancel(JStormRunnerResult result) {
-        try {
-            result.cancel();
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to cancel.", e);
-}
+  private void cancel(JStormRunnerResult result) {
+    try {
+      result.cancel();
+    } catch (IOException e) {
+      throw new RuntimeException("Failed to cancel.", e);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
index aa7d325..fa4eeb6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableListSerializer.java
@@ -1,92 +1,108 @@
 package org.apache.beam.runners.jstorm.serialization;
 
 import backtype.storm.Config;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
 import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
 import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import com.google.common.collect.*;
+import com.google.common.collect.HashBasedTable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Table;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
 
 public class ImmutableListSerializer extends Serializer<ImmutableList<Object>> {
 
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
+  private static final boolean DOES_NOT_ACCEPT_NULL = false;
+  private static final boolean IMMUTABLE = true;
 
-    public ImmutableListSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
+  public ImmutableListSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+    output.writeInt(object.size(), true);
+    for (Object elm : object) {
+      kryo.writeClassAndObject(output, elm);
     }
+  }
 
-    @Override
-    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-        final int size = input.readInt(true);
-        final Object[] list = new Object[size];
-        for (int i = 0; i < size; ++i) {
-            list[i] = kryo.readClassAndObject(input);
-        }
-        return ImmutableList.copyOf(list);
+  @Override
+  public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+    final int size = input.readInt(true);
+    final Object[] list = new Object[size];
+    for (int i = 0; i < size; ++i) {
+      list[i] = kryo.readClassAndObject(input);
     }
+    return ImmutableList.copyOf(list);
+  }
 
-    /**
-     * Creates a new {@link ImmutableListSerializer} and registers its serializer
-     * for the several ImmutableList related classes.
-     */
-    public static void registerSerializers(Config config) {
+  /**
+   * Creates a new {@link ImmutableListSerializer} and registers its serializer
+   * for the several ImmutableList related classes.
+   */
+  public static void registerSerializers(Config config) {
 
-        // ImmutableList (abstract class)
-        //  +- RegularImmutableList
-        //  |   RegularImmutableList
-        //  +- SingletonImmutableList
-        //  |   Optimized for List with only 1 element.
-        //  +- SubList
-        //  |   Representation for part of ImmutableList
-        //  +- ReverseImmutableList
-        //  |   For iterating in reverse order
-        //  +- StringAsImmutableList
-        //  |   Used by Lists#charactersOf
-        //  +- Values (ImmutableTable values)
-        //      Used by return value of #values() when there are multiple cells
+    // ImmutableList (abstract class)
+    //  +- RegularImmutableList
+    //  |   RegularImmutableList
+    //  +- SingletonImmutableList
+    //  |   Optimized for List with only 1 element.
+    //  +- SubList
+    //  |   Representation for part of ImmutableList
+    //  +- ReverseImmutableList
+    //  |   For iterating in reverse order
+    //  +- StringAsImmutableList
+    //  |   Used by Lists#charactersOf
+    //  +- Values (ImmutableTable values)
+    //      Used by return value of #values() when there are multiple cells
 
-        config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.class), ImmutableListSerializer.class);
+    config.registerSerialization(ImmutableList.class, ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(ImmutableList.class), ImmutableListSerializer.class);
 
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
 
-        config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1).getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1,2,3).subList(1, 2).getClass()), ImmutableListSerializer.class);
-        config.registerSerialization(ImmutableList.of().reverse().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().reverse().getClass()), ImmutableListSerializer.class);
+    config.registerSerialization(ImmutableList.of().getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().getClass()),
+        ImmutableListSerializer.class);
+    config.registerSerialization(ImmutableList.of(1).getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1).getClass()),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of(1, 2, 3).subList(1, 2).getClass()),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of().reverse().getClass(),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(ImmutableList.of().reverse().getClass()),
+        ImmutableListSerializer.class);
 
-        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(Lists.charactersOf("KryoRocks").getClass()), ImmutableListSerializer.class);
+    config.registerSerialization(
+        Lists.charactersOf("KryoRocks").getClass(),
+        ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(Lists.charactersOf("KryoRocks").getClass()),
+        ImmutableListSerializer.class);
 
-        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
-        baseTable.put(1, 2, 3);
-        baseTable.put(4, 5, 6);
-        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-        config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
-        config.registerSerialization(
-                RunnerUtils.getBeamSdkRepackClass(table.values().getClass()), ImmutableListSerializer.class);
+    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
+    baseTable.put(1, 2, 3);
+    baseTable.put(4, 5, 6);
+    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+    config.registerSerialization(table.values().getClass(), ImmutableListSerializer.class);
+    config.registerSerialization(
+        RunnerUtils.getBeamSdkRepackClass(table.values().getClass()),
+        ImmutableListSerializer.class);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
index ee8b765..77eede3 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableMapSerializer.java
@@ -7,55 +7,61 @@ import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Map;
 
 public class ImmutableMapSerializer extends Serializer<ImmutableMap<Object, ? extends Object>> {
 
-    private static final boolean DOES_NOT_ACCEPT_NULL = true;
-    private static final boolean IMMUTABLE = true;
+  private static final boolean DOES_NOT_ACCEPT_NULL = true;
+  private static final boolean IMMUTABLE = true;
 
-    public ImmutableMapSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
+  public ImmutableMapSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
-        kryo.writeObject(output, Maps.newHashMap(immutableMap));
-    }
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableMap<Object, ? extends Object> immutableMap) {
+    kryo.writeObject(output, Maps.newHashMap(immutableMap));
+  }
 
-    @Override
-    public ImmutableMap<Object, Object> read(Kryo kryo, Input input, Class<ImmutableMap<Object, ? extends Object>> type) {
-        Map map = kryo.readObject(input, HashMap.class);
-        return ImmutableMap.copyOf(map);
-    }
+  @Override
+  public ImmutableMap<Object, Object> read(
+      Kryo kryo,
+      Input input,
+      Class<ImmutableMap<Object, ? extends Object>> type) {
+    Map map = kryo.readObject(input, HashMap.class);
+    return ImmutableMap.copyOf(map);
+  }
 
-    /**
-     * Creates a new {@link ImmutableMapSerializer} and registers its serializer
-     * for the several ImmutableMap related classes.
-     */
-    public static void registerSerializers(Config config) {
+  /**
+   * Creates a new {@link ImmutableMapSerializer} and registers its serializer
+   * for the several ImmutableMap related classes.
+   */
+  public static void registerSerializers(Config config) {
 
-        config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
-        config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
+    config.registerSerialization(ImmutableMap.class, ImmutableMapSerializer.class);
+    config.registerSerialization(ImmutableMap.of().getClass(), ImmutableMapSerializer.class);
 
-        Object o1 = new Object();
-        Object o2 = new Object();
+    Object o1 = new Object();
+    Object o2 = new Object();
 
-        config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
-        config.registerSerialization(ImmutableMap.of(o1, o1, o2, o2).getClass(), ImmutableMapSerializer.class);
-        Map<DummyEnum,Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
-        for (DummyEnum e : DummyEnum.values()) {
-            enumMap.put(e, o1);
-        }
-
-        config.registerSerialization(ImmutableMap.copyOf(enumMap).getClass(), ImmutableMapSerializer.class);
+    config.registerSerialization(ImmutableMap.of(o1, o1).getClass(), ImmutableMapSerializer.class);
+    config.registerSerialization(
+        ImmutableMap.of(o1, o1, o2, o2).getClass(),
+        ImmutableMapSerializer.class);
+    Map<DummyEnum, Object> enumMap = new EnumMap<DummyEnum, Object>(DummyEnum.class);
+    for (DummyEnum e : DummyEnum.values()) {
+      enumMap.put(e, o1);
     }
 
-    private enum DummyEnum {
-        VALUE1,
-        VALUE2
-    }
+    config.registerSerialization(
+        ImmutableMap.copyOf(enumMap).getClass(),
+        ImmutableMapSerializer.class);
+  }
+
+  private enum DummyEnum {
+    VALUE1,
+    VALUE2
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
index cdc4382..3a43b2b 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/ImmutableSetSerializer.java
@@ -10,62 +10,63 @@ import com.google.common.collect.Sets;
 
 public class ImmutableSetSerializer extends Serializer<ImmutableSet<Object>> {
 
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
+  private static final boolean DOES_NOT_ACCEPT_NULL = false;
+  private static final boolean IMMUTABLE = true;
 
-    public ImmutableSetSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
+  public ImmutableSetSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+    output.writeInt(object.size(), true);
+    for (Object elm : object) {
+      kryo.writeClassAndObject(output, elm);
     }
+  }
 
-    @Override
-    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-        final int size = input.readInt(true);
-        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-        for (int i = 0; i < size; ++i) {
-            builder.add(kryo.readClassAndObject(input));
-        }
-        return builder.build();
+  @Override
+  public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+    final int size = input.readInt(true);
+    ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+    for (int i = 0; i < size; ++i) {
+      builder.add(kryo.readClassAndObject(input));
     }
+    return builder.build();
+  }
 
-    /**
-     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-     * for the several ImmutableSet related classes.
-     */
-    public static void registerSerializers(Config config) {
+  /**
+   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+   * for the several ImmutableSet related classes.
+   */
+  public static void registerSerializers(Config config) {
 
-        // ImmutableList (abstract class)
-        //  +- EmptyImmutableSet
-        //  |   EmptyImmutableSet
-        //  +- SingletonImmutableSet
-        //  |   Optimized for Set with only 1 element.
-        //  +- RegularImmutableSet
-        //  |   RegularImmutableList
-        //  +- EnumImmutableSet
-        //  |   EnumImmutableSet
+    // ImmutableList (abstract class)
+    //  +- EmptyImmutableSet
+    //  |   EmptyImmutableSet
+    //  +- SingletonImmutableSet
+    //  |   Optimized for Set with only 1 element.
+    //  +- RegularImmutableSet
+    //  |   RegularImmutableList
+    //  +- EnumImmutableSet
+    //  |   EnumImmutableSet
 
-        config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.class, ImmutableSetSerializer.class);
 
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
 
-        config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of().getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1).getClass(), ImmutableSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1, 2, 3).getClass(), ImmutableSetSerializer.class);
 
-        config.registerSerialization(
-                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), ImmutableSetSerializer.class);
-    }
+    config.registerSerialization(
+        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
+        ImmutableSetSerializer.class);
+  }
 
-    private enum SomeEnum {
-        A, B, C
-    }
+  private enum SomeEnum {
+    A, B, C
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
index decfb3f..b47f3b7 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/KvStoreIterableSerializer.java
@@ -6,50 +6,49 @@ import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
 import com.google.common.collect.Lists;
-
 import java.util.Iterator;
 import java.util.List;
 
 public class KvStoreIterableSerializer extends Serializer<KvStoreIterable<Object>> {
 
-    public KvStoreIterableSerializer() {
+  public KvStoreIterableSerializer() {
 
-    }
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
-        List<Object> values = Lists.newArrayList(object);
-        output.writeInt(values.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
+  @Override
+  public void write(Kryo kryo, Output output, KvStoreIterable<Object> object) {
+    List<Object> values = Lists.newArrayList(object);
+    output.writeInt(values.size(), true);
+    for (Object elm : object) {
+      kryo.writeClassAndObject(output, elm);
     }
-
-    @Override
-    public KvStoreIterable<Object> read(Kryo kryo, Input input, Class<KvStoreIterable<Object>> type) {
-        final int size = input.readInt(true);
-        List<Object> values = Lists.newArrayList();
-        for (int i = 0; i < size; ++i) {
-            values.add(kryo.readClassAndObject(input));
-        }
-
-        return new KvStoreIterable<Object>() {
-            Iterable<Object> values;
-
-            @Override
-            public Iterator<Object> iterator() {
-                return values.iterator();
-            }
-
-            public KvStoreIterable init(Iterable<Object> values) {
-                this.values = values;
-                return this;
-            }
-
-            @Override
-            public String toString() {
-                return values.toString();
-            }
-        }.init(values);
+  }
+
+  @Override
+  public KvStoreIterable<Object> read(Kryo kryo, Input input, Class<KvStoreIterable<Object>> type) {
+    final int size = input.readInt(true);
+    List<Object> values = Lists.newArrayList();
+    for (int i = 0; i < size; ++i) {
+      values.add(kryo.readClassAndObject(input));
     }
+
+    return new KvStoreIterable<Object>() {
+      Iterable<Object> values;
+
+      @Override
+      public Iterator<Object> iterator() {
+        return values.iterator();
+      }
+
+      public KvStoreIterable init(Iterable<Object> values) {
+        this.values = values;
+        return this;
+      }
+
+      @Override
+      public String toString() {
+        return values.toString();
+      }
+    }.init(values);
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
index 9bb315b..dd4272c 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuListSerializer.java
@@ -6,73 +6,83 @@ import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
 import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.HashBasedTable;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableList;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableTable;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Lists;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Table;
 
 public class SdkRepackImmuListSerializer extends Serializer<ImmutableList<Object>> {
 
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
+  private static final boolean DOES_NOT_ACCEPT_NULL = false;
+  private static final boolean IMMUTABLE = true;
 
-    public SdkRepackImmuListSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
+  public SdkRepackImmuListSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableList<Object> object) {
+    output.writeInt(object.size(), true);
+    for (Object elm : object) {
+      kryo.writeClassAndObject(output, elm);
     }
+  }
 
-    @Override
-    public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
-        final int size = input.readInt(true);
-        final Object[] list = new Object[size];
-        for (int i = 0; i < size; ++i) {
-            list[i] = kryo.readClassAndObject(input);
-        }
-        return ImmutableList.copyOf(list);
+  @Override
+  public ImmutableList<Object> read(Kryo kryo, Input input, Class<ImmutableList<Object>> type) {
+    final int size = input.readInt(true);
+    final Object[] list = new Object[size];
+    for (int i = 0; i < size; ++i) {
+      list[i] = kryo.readClassAndObject(input);
     }
+    return ImmutableList.copyOf(list);
+  }
 
-    /**
-     * Creates a new {@link ImmutableListSerializer} and registers its serializer
-     * for the several ImmutableList related classes.
-     */
-    public static void registerSerializers(Config config) {
+  /**
+   * Creates a new {@link ImmutableListSerializer} and registers its serializer
+   * for the several ImmutableList related classes.
+   */
+  public static void registerSerializers(Config config) {
 
-        // ImmutableList (abstract class)
-        //  +- RegularImmutableList
-        //  |   RegularImmutableList
-        //  +- SingletonImmutableList
-        //  |   Optimized for List with only 1 element.
-        //  +- SubList
-        //  |   Representation for part of ImmutableList
-        //  +- ReverseImmutableList
-        //  |   For iterating in reverse order
-        //  +- StringAsImmutableList
-        //  |   Used by Lists#charactersOf
-        //  +- Values (ImmutableTable values)
-        //      Used by return value of #values() when there are multiple cells
+    // ImmutableList (abstract class)
+    //  +- RegularImmutableList
+    //  |   RegularImmutableList
+    //  +- SingletonImmutableList
+    //  |   Optimized for List with only 1 element.
+    //  +- SubList
+    //  |   Representation for part of ImmutableList
+    //  +- ReverseImmutableList
+    //  |   For iterating in reverse order
+    //  +- StringAsImmutableList
+    //  |   Used by Lists#charactersOf
+    //  +- Values (ImmutableTable values)
+    //      Used by return value of #values() when there are multiple cells
 
-        config.registerSerialization(ImmutableList.class, SdkRepackImmuListSerializer.class);
+    config.registerSerialization(ImmutableList.class, SdkRepackImmuListSerializer.class);
 
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
 
-        config.registerSerialization(ImmutableList.of().getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1).getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of(1,2,3).subList(1, 2).getClass(), SdkRepackImmuListSerializer.class);
-        config.registerSerialization(ImmutableList.of().reverse().getClass(), SdkRepackImmuListSerializer.class);
+    config.registerSerialization(ImmutableList.of().getClass(), SdkRepackImmuListSerializer.class);
+    config.registerSerialization(ImmutableList.of(1).getClass(), SdkRepackImmuListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of(1, 2, 3).subList(1, 2).getClass(),
+        SdkRepackImmuListSerializer.class);
+    config.registerSerialization(
+        ImmutableList.of().reverse().getClass(),
+        SdkRepackImmuListSerializer.class);
 
-        config.registerSerialization(Lists.charactersOf("KryoRocks").getClass(), SdkRepackImmuListSerializer.class);
+    config.registerSerialization(
+        Lists.charactersOf("KryoRocks").getClass(),
+        SdkRepackImmuListSerializer.class);
 
-        Table<Integer,Integer,Integer> baseTable = HashBasedTable.create();
-        baseTable.put(1, 2, 3);
-        baseTable.put(4, 5, 6);
-        Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
-        config.registerSerialization(table.values().getClass(), SdkRepackImmuListSerializer.class);
+    Table<Integer, Integer, Integer> baseTable = HashBasedTable.create();
+    baseTable.put(1, 2, 3);
+    baseTable.put(4, 5, 6);
+    Table<Integer, Integer, Integer> table = ImmutableTable.copyOf(baseTable);
+    config.registerSerialization(table.values().getClass(), SdkRepackImmuListSerializer.class);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
index a514645..6973c82 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/SdkRepackImmuSetSerializer.java
@@ -5,67 +5,71 @@ import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
 import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-import org.apache.beam.sdk.repackaged.com.google.common.collect.*;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.ImmutableSet;
+import org.apache.beam.sdk.repackaged.com.google.common.collect.Sets;
 
 public class SdkRepackImmuSetSerializer extends Serializer<ImmutableSet<Object>> {
 
-    private static final boolean DOES_NOT_ACCEPT_NULL = false;
-    private static final boolean IMMUTABLE = true;
+  private static final boolean DOES_NOT_ACCEPT_NULL = false;
+  private static final boolean IMMUTABLE = true;
 
-    public SdkRepackImmuSetSerializer() {
-        super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
-    }
+  public SdkRepackImmuSetSerializer() {
+    super(DOES_NOT_ACCEPT_NULL, IMMUTABLE);
+  }
 
-    @Override
-    public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
-        output.writeInt(object.size(), true);
-        for (Object elm : object) {
-            kryo.writeClassAndObject(output, elm);
-        }
+  @Override
+  public void write(Kryo kryo, Output output, ImmutableSet<Object> object) {
+    output.writeInt(object.size(), true);
+    for (Object elm : object) {
+      kryo.writeClassAndObject(output, elm);
     }
+  }
 
-    @Override
-    public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
-        final int size = input.readInt(true);
-        ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
-        for (int i = 0; i < size; ++i) {
-            builder.add(kryo.readClassAndObject(input));
-        }
-        return builder.build();
+  @Override
+  public ImmutableSet<Object> read(Kryo kryo, Input input, Class<ImmutableSet<Object>> type) {
+    final int size = input.readInt(true);
+    ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
+    for (int i = 0; i < size; ++i) {
+      builder.add(kryo.readClassAndObject(input));
     }
+    return builder.build();
+  }
 
-    /**
-     * Creates a new {@link ImmutableSetSerializer} and registers its serializer
-     * for the several ImmutableSet related classes.
-     */
-    public static void registerSerializers(Config config) {
+  /**
+   * Creates a new {@link ImmutableSetSerializer} and registers its serializer
+   * for the several ImmutableSet related classes.
+   */
+  public static void registerSerializers(Config config) {
 
-        // ImmutableList (abstract class)
-        //  +- EmptyImmutableSet
-        //  |   EmptyImmutableSet
-        //  +- SingletonImmutableSet
-        //  |   Optimized for Set with only 1 element.
-        //  +- RegularImmutableSet
-        //  |   RegularImmutableList
-        //  +- EnumImmutableSet
-        //  |   EnumImmutableSet
+    // ImmutableList (abstract class)
+    //  +- EmptyImmutableSet
+    //  |   EmptyImmutableSet
+    //  +- SingletonImmutableSet
+    //  |   Optimized for Set with only 1 element.
+    //  +- RegularImmutableSet
+    //  |   RegularImmutableList
+    //  +- EnumImmutableSet
+    //  |   EnumImmutableSet
 
-        config.registerSerialization(ImmutableSet.class, SdkRepackImmuSetSerializer.class);
+    config.registerSerialization(ImmutableSet.class, SdkRepackImmuSetSerializer.class);
 
-        // Note:
-        //  Only registering above is good enough for serializing/deserializing.
-        //  but if using Kryo#copy, following is required.
+    // Note:
+    //  Only registering above is good enough for serializing/deserializing.
+    //  but if using Kryo#copy, following is required.
 
-        config.registerSerialization(ImmutableSet.of().getClass(), SdkRepackImmuSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1).getClass(), SdkRepackImmuSetSerializer.class);
-        config.registerSerialization(ImmutableSet.of(1,2,3).getClass(), SdkRepackImmuSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of().getClass(), SdkRepackImmuSetSerializer.class);
+    config.registerSerialization(ImmutableSet.of(1).getClass(), SdkRepackImmuSetSerializer.class);
+    config.registerSerialization(
+        ImmutableSet.of(1, 2, 3).getClass(),
+        SdkRepackImmuSetSerializer.class);
 
-        config.registerSerialization(
-                Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(), SdkRepackImmuSetSerializer.class);
-    }
+    config.registerSerialization(
+        Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B, SomeEnum.C).getClass(),
+        SdkRepackImmuSetSerializer.class);
+  }
 
-    private enum SomeEnum {
-        A, B, C
-    }
+  private enum SomeEnum {
+    A, B, C
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
index c8b0138..bcee778 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/serialization/UnmodifiableCollectionsSerializer.java
@@ -5,155 +5,177 @@ import com.alibaba.jstorm.esotericsoftware.kryo.Kryo;
 import com.alibaba.jstorm.esotericsoftware.kryo.Serializer;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Input;
 import com.alibaba.jstorm.esotericsoftware.kryo.io.Output;
-
 import java.lang.reflect.Field;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
 
 public class UnmodifiableCollectionsSerializer extends Serializer<Object> {
 
-    private static final Field SOURCE_COLLECTION_FIELD;
-    private static final Field SOURCE_MAP_FIELD;
+  private static final Field SOURCE_COLLECTION_FIELD;
+  private static final Field SOURCE_MAP_FIELD;
 
-    static {
-        try {
-            SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection" )
-                    .getDeclaredField( "c" );
-            SOURCE_COLLECTION_FIELD.setAccessible( true );
+  static {
+    try {
+      SOURCE_COLLECTION_FIELD = Class.forName("java.util.Collections$UnmodifiableCollection")
+          .getDeclaredField("c");
+      SOURCE_COLLECTION_FIELD.setAccessible(true);
 
 
-            SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap" )
-                    .getDeclaredField( "m" );
-            SOURCE_MAP_FIELD.setAccessible( true );
-        } catch ( final Exception e ) {
-            throw new RuntimeException( "Could not access source collection" +
-                    " field in java.util.Collections$UnmodifiableCollection.", e );
-        }
+      SOURCE_MAP_FIELD = Class.forName("java.util.Collections$UnmodifiableMap")
+          .getDeclaredField("m");
+      SOURCE_MAP_FIELD.setAccessible(true);
+    } catch (final Exception e) {
+      throw new RuntimeException("Could not access source collection"
+          + " field in java.util.Collections$UnmodifiableCollection.", e);
     }
-
-    @Override
-    public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
-        final int ordinal = input.readInt( true );
-        final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.values()[ordinal];
-        final Object sourceCollection = kryo.readClassAndObject( input );
-        return unmodifiableCollection.create( sourceCollection );
+  }
+
+  @Override
+  public Object read(final Kryo kryo, final Input input, final Class<Object> clazz) {
+    final int ordinal = input.readInt(true);
+    final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.values()[ordinal];
+    final Object sourceCollection = kryo.readClassAndObject(input);
+    return unmodifiableCollection.create(sourceCollection);
+  }
+
+  @Override
+  public void write(final Kryo kryo, final Output output, final Object object) {
+    try {
+      final UnmodifiableCollection unmodifiableCollection =
+          UnmodifiableCollection.valueOfType(object.getClass());
+      // the ordinal could be replaced by something else (e.g. an explicitly managed "id")
+      output.writeInt(unmodifiableCollection.ordinal(), true);
+      kryo.writeClassAndObject(output, unmodifiableCollection.sourceCollectionField.get(object));
+    } catch (final RuntimeException e) {
+      // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
+      // handles SerializationException specifically (resizing the buffer)...
+      throw e;
+    } catch (final Exception e) {
+      throw new RuntimeException(e);
     }
-
-    @Override
-    public void write(final Kryo kryo, final Output output, final Object object) {
-        try {
-            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( object.getClass() );
-            // the ordinal could be replaced by s.th. else (e.g. a explicitely managed "id")
-            output.writeInt( unmodifiableCollection.ordinal(), true );
-            kryo.writeClassAndObject( output, unmodifiableCollection.sourceCollectionField.get( object ) );
-        } catch ( final RuntimeException e ) {
-            // Don't eat and wrap RuntimeExceptions because the ObjectBuffer.write...
-            // handles SerializationException specifically (resizing the buffer)...
-            throw e;
-        } catch ( final Exception e ) {
-            throw new RuntimeException( e );
-        }
+  }
+
+  @Override
+  public Object copy(Kryo kryo, Object original) {
+    try {
+      final UnmodifiableCollection unmodifiableCollection =
+          UnmodifiableCollection.valueOfType(original.getClass());
+      Object sourceCollectionCopy =
+          kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
+      return unmodifiableCollection.create(sourceCollectionCopy);
+    } catch (final RuntimeException e) {
+      // Don't eat and wrap RuntimeExceptions
+      throw e;
+    } catch (final Exception e) {
+      throw new RuntimeException(e);
     }
-
-    @Override
-    public Object copy(Kryo kryo, Object original) {
-        try {
-            final UnmodifiableCollection unmodifiableCollection = UnmodifiableCollection.valueOfType( original.getClass() );
-            Object sourceCollectionCopy = kryo.copy(unmodifiableCollection.sourceCollectionField.get(original));
-            return unmodifiableCollection.create( sourceCollectionCopy );
-        } catch ( final RuntimeException e ) {
-            // Don't eat and wrap RuntimeExceptions
-            throw e;
-        } catch ( final Exception e ) {
-            throw new RuntimeException( e );
-        }
+  }
+
+  private static enum UnmodifiableCollection {
+    COLLECTION(
+        Collections.unmodifiableCollection(Arrays.asList("")).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableCollection((Collection<?>) sourceCollection);
+      }
+    },
+    RANDOM_ACCESS_LIST(
+        Collections.unmodifiableList(new ArrayList<Void>()).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableList((List<?>) sourceCollection);
+      }
+    },
+    LIST(Collections.unmodifiableList(new LinkedList<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableList((List<?>) sourceCollection);
+      }
+    },
+    SET(Collections.unmodifiableSet(new HashSet<Void>()).getClass(), SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSet((Set<?>) sourceCollection);
+      }
+    },
+    SORTED_SET(
+        Collections.unmodifiableSortedSet(new TreeSet<Void>()).getClass(),
+        SOURCE_COLLECTION_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSortedSet((SortedSet<?>) sourceCollection);
+      }
+    },
+    MAP(Collections.unmodifiableMap(new HashMap<Void, Void>()).getClass(), SOURCE_MAP_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableMap((Map<?, ?>) sourceCollection);
+      }
+
+    },
+    SORTED_MAP(
+        Collections.unmodifiableSortedMap(new TreeMap<Void, Void>()).getClass(),
+        SOURCE_MAP_FIELD) {
+      @Override
+      public Object create(final Object sourceCollection) {
+        return Collections.unmodifiableSortedMap((SortedMap<?, ?>) sourceCollection);
+      }
+    };
+
+    private final Class<?> type;
+    private final Field sourceCollectionField;
+
+    private UnmodifiableCollection(final Class<?> type, final Field sourceCollectionField) {
+      this.type = type;
+      this.sourceCollectionField = sourceCollectionField;
     }
 
-    private static enum UnmodifiableCollection {
-        COLLECTION( Collections.unmodifiableCollection( Arrays.asList( "" ) ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableCollection( (Collection<?>) sourceCollection );
-            }
-        },
-        RANDOM_ACCESS_LIST( Collections.unmodifiableList( new ArrayList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableList( (List<?>) sourceCollection );
-            }
-        },
-        LIST( Collections.unmodifiableList( new LinkedList<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableList( (List<?>) sourceCollection );
-            }
-        },
-        SET( Collections.unmodifiableSet( new HashSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSet( (Set<?>) sourceCollection );
-            }
-        },
-        SORTED_SET( Collections.unmodifiableSortedSet( new TreeSet<Void>() ).getClass(), SOURCE_COLLECTION_FIELD ){
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSortedSet( (SortedSet<?>) sourceCollection );
-            }
-        },
-        MAP( Collections.unmodifiableMap( new HashMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
-
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableMap( (Map<?, ?>) sourceCollection );
-            }
-
-        },
-        SORTED_MAP( Collections.unmodifiableSortedMap( new TreeMap<Void, Void>() ).getClass(), SOURCE_MAP_FIELD ) {
-            @Override
-            public Object create( final Object sourceCollection ) {
-                return Collections.unmodifiableSortedMap( (SortedMap<?, ?>) sourceCollection );
-            }
-        };
-
-        private final Class<?> type;
-        private final Field sourceCollectionField;
-
-        private UnmodifiableCollection( final Class<?> type, final Field sourceCollectionField ) {
-            this.type = type;
-            this.sourceCollectionField = sourceCollectionField;
-        }
+    /**
+     * @param sourceCollection the underlying source collection or map to wrap
+     */
+    public abstract Object create(Object sourceCollection);
 
-        /**
-         * @param sourceCollection
-         */
-        public abstract Object create( Object sourceCollection );
-
-        static UnmodifiableCollection valueOfType(final Class<?> type ) {
-            for( final UnmodifiableCollection item : values() ) {
-                if ( item.type.equals( type ) ) {
-                    return item;
-                }
-            }
-            throw new IllegalArgumentException( "The type " + type + " is not supported." );
+    static UnmodifiableCollection valueOfType(final Class<?> type) {
+      for (final UnmodifiableCollection item : values()) {
+        if (item.type.equals(type)) {
+          return item;
         }
-
+      }
+      throw new IllegalArgumentException("The type " + type + " is not supported.");
     }
 
-    /**
-     * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
-     * for the several unmodifiable Collections that can be created via {@link Collections},
-     * including {@link Map}s.
-     *
-     * @see Collections#unmodifiableCollection(Collection)
-     * @see Collections#unmodifiableList(List)
-     * @see Collections#unmodifiableSet(Set)
-     * @see Collections#unmodifiableSortedSet(SortedSet)
-     * @see Collections#unmodifiableMap(Map)
-     * @see Collections#unmodifiableSortedMap(SortedMap)
-     */
-    public static void registerSerializers( Config config ) {
-        UnmodifiableCollection.values();
-        for ( final UnmodifiableCollection item : UnmodifiableCollection.values() ) {
-            config.registerSerialization( item.type, UnmodifiableCollectionsSerializer.class );
-        }
+  }
+
+  /**
+   * Creates a new {@link UnmodifiableCollectionsSerializer} and registers its serializer
+   * for the several unmodifiable Collections that can be created via {@link Collections},
+   * including {@link Map}s.
+   *
+   * @see Collections#unmodifiableCollection(Collection)
+   * @see Collections#unmodifiableList(List)
+   * @see Collections#unmodifiableSet(Set)
+   * @see Collections#unmodifiableSortedSet(SortedSet)
+   * @see Collections#unmodifiableMap(Map)
+   * @see Collections#unmodifiableSortedMap(SortedMap)
+   */
+  public static void registerSerializers(Config config) {
+    UnmodifiableCollection.values();
+    for (final UnmodifiableCollection item : UnmodifiableCollection.values()) {
+      config.registerSerialization(item.type, UnmodifiableCollectionsSerializer.class);
     }
+  }
 }


[53/53] [abbrv] beam git commit: This closes #3734

Posted by pe...@apache.org.
This closes #3734


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/e00e0e84
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/e00e0e84
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/e00e0e84

Branch: refs/heads/jstorm-runner
Commit: e00e0e841e19d427377bd576f26b7fbf62c3b9fb
Parents: 0a05de3 26bcdf3
Author: Pei He <pe...@apache.org>
Authored: Sun Aug 20 22:59:43 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sun Aug 20 22:59:43 2017 +0800

----------------------------------------------------------------------
 ...ostCommit_Java_ValidatesRunner_JStorm.groovy |  43 +
 runners/jstorm/pom.xml                          |  58 +-
 .../runners/jstorm/JStormPipelineOptions.java   |  50 ++
 .../beam/runners/jstorm/JStormRunner.java       | 383 ++++++++-
 .../runners/jstorm/JStormRunnerRegistrar.java   |  41 +-
 .../beam/runners/jstorm/JStormRunnerResult.java |  78 +-
 .../beam/runners/jstorm/TestJStormRunner.java   | 162 ++++
 .../BeamSdkRepackUtilsSerializer.java           | 287 +++++++
 .../serialization/BeamUtilsSerializer.java      | 114 +++
 .../serialization/GuavaUtilsSerializer.java     | 286 +++++++
 .../serialization/JStormUtilsSerializer.java    | 126 +++
 .../serialization/JavaUtilsSerializer.java      | 235 ++++++
 .../jstorm/serialization/package-info.java      |  22 +
 .../jstorm/translation/AbstractComponent.java   |  67 ++
 .../translation/BoundedSourceTranslator.java    |  48 ++
 .../jstorm/translation/CommonInstance.java      |  28 +
 .../jstorm/translation/DefaultStepContext.java  |  90 ++
 .../jstorm/translation/DoFnExecutor.java        | 348 ++++++++
 .../translation/DoFnRunnerWithMetrics.java      |  91 ++
 .../runners/jstorm/translation/Executor.java    |  42 +
 .../jstorm/translation/ExecutorContext.java     |  41 +
 .../jstorm/translation/ExecutorsBolt.java       | 366 ++++++++
 .../jstorm/translation/FlattenExecutor.java     |  67 ++
 .../jstorm/translation/FlattenTranslator.java   | 153 ++++
 .../translation/GroupByKeyTranslator.java       |  59 ++
 .../translation/GroupByWindowExecutor.java      | 204 +++++
 .../translation/JStormPipelineTranslator.java   | 184 +++++
 .../translation/JStormStateInternals.java       | 824 +++++++++++++++++++
 .../translation/JStormTimerInternals.java       |  96 +++
 .../jstorm/translation/MetricsReporter.java     |  86 ++
 .../translation/MultiOutputDoFnExecutor.java    |  55 ++
 .../translation/MultiStatefulDoFnExecutor.java  |  69 ++
 .../translation/ParDoBoundMultiTranslator.java  | 104 +++
 .../translation/SerializedPipelineOptions.java  |  65 ++
 .../translation/StatefulDoFnExecutor.java       |  67 ++
 .../beam/runners/jstorm/translation/Stream.java | 104 +++
 .../jstorm/translation/TimerService.java        |  53 ++
 .../jstorm/translation/TimerServiceImpl.java    | 164 ++++
 .../jstorm/translation/TransformTranslator.java |  85 ++
 .../jstorm/translation/TranslationContext.java  | 472 +++++++++++
 .../jstorm/translation/TranslatorRegistry.java  |  57 ++
 .../jstorm/translation/TxExecutorsBolt.java     | 133 +++
 .../translation/TxUnboundedSourceSpout.java     | 156 ++++
 .../translation/UnboundedSourceSpout.java       | 203 +++++
 .../translation/UnboundedSourceTranslator.java  |  45 +
 .../jstorm/translation/ViewExecutor.java        |  56 ++
 .../jstorm/translation/ViewTranslator.java      | 376 +++++++++
 .../translation/WindowAssignExecutor.java       | 110 +++
 .../translation/WindowAssignTranslator.java     |  41 +
 .../jstorm/translation/package-info.java        |  22 +
 .../jstorm/JStormRunnerRegistrarTest.java       |   4 +-
 .../translation/JStormStateInternalsTest.java   | 221 +++++
 runners/jstorm/src/test/resources/logback.xml   |  42 +
 runners/pom.xml                                 |   2 +-
 .../beam/sdk/transforms/ReshuffleTest.java      |  22 +-
 sdks/pom.xml                                    |   2 +-
 56 files changed, 7351 insertions(+), 58 deletions(-)
----------------------------------------------------------------------



[13/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
index 9df1e17..e80fb48 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
@@ -17,13 +17,15 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
-import java.io.IOException;
-import java.util.*;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 import avro.shaded.com.google.common.base.Joiner;
 import avro.shaded.com.google.common.collect.Sets;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
 import backtype.storm.tuple.ITupleExt;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.cache.KvStoreManagerFactory;
 import com.alibaba.jstorm.cluster.Common;
@@ -31,6 +33,14 @@ import com.alibaba.jstorm.utils.KryoSerializer;
 import com.google.common.base.Function;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Maps;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
@@ -39,289 +49,287 @@ import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 public class ExecutorsBolt extends AdaptorBasicBolt {
-    private static final long serialVersionUID = -7751043327801735211L;
+  private static final long serialVersionUID = -7751043327801735211L;
 
-    private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
 
-    protected ExecutorContext executorContext;
+  protected ExecutorContext executorContext;
 
-    protected TimerService timerService;
+  protected TimerService timerService;
 
-    // map from input tag to executor inside bolt
-    protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
-    // set of all output tags that will be emit outside bolt
-    protected final Set<TupleTag> outputTags = Sets.newHashSet();
-    protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
-    protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
-    protected int internalDoFnExecutorId = 1;
-    protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
+  // map from input tag to executor inside bolt
+  protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
+  // set of all output tags that will be emit outside bolt
+  protected final Set<TupleTag> outputTags = Sets.newHashSet();
+  protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
+  protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
+  protected int internalDoFnExecutorId = 1;
+  protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
 
-    protected OutputCollector collector;
+  protected OutputCollector collector;
 
-    protected boolean isStatefulBolt = false;
+  protected boolean isStatefulBolt = false;
 
-    protected KryoSerializer<WindowedValue> serializer;
+  protected KryoSerializer<WindowedValue> serializer;
 
-    public ExecutorsBolt() {
+  public ExecutorsBolt() {
 
-    }
-
-    public void setStatefulBolt(boolean isStateful) {
-        isStatefulBolt = isStateful;
-    }
-
-    public void addExecutor(TupleTag inputTag, Executor executor) {
-        inputTagToExecutor.put(
-                checkNotNull(inputTag, "inputTag"),
-                checkNotNull(executor, "executor"));
-    }
-
-    public Map<TupleTag, Executor> getExecutors() {
-        return inputTagToExecutor;
-    }
-
-    public void registerExecutor(Executor executor) {
-        if (executor instanceof DoFnExecutor) {
-            DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
-            idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
-            doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
-            internalDoFnExecutorId++;
-        }
-    }
-
-    public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
-        return idToDoFnExecutor;
-    }
+  }
 
-    public void addOutputTags(TupleTag tag) {
-        outputTags.add(tag);
-    }
+  public void setStatefulBolt(boolean isStateful) {
+    isStatefulBolt = isStateful;
+  }
 
-    public void addExternalOutputTag(TupleTag<?> tag) {
-        externalOutputTags.add(tag);
-    }
+  public void addExecutor(TupleTag inputTag, Executor executor) {
+    inputTagToExecutor.put(
+        checkNotNull(inputTag, "inputTag"),
+        checkNotNull(executor, "executor"));
+  }
 
-    public Set<TupleTag> getOutputTags() {
-        return outputTags;
-    }
+  public Map<TupleTag, Executor> getExecutors() {
+    return inputTagToExecutor;
+  }
 
-    public ExecutorContext getExecutorContext() {
-        return executorContext;
+  public void registerExecutor(Executor executor) {
+    if (executor instanceof DoFnExecutor) {
+      DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
+      idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
+      doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
+      internalDoFnExecutorId++;
     }
-
-    @Override
-    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-        LOG.info("Start to prepare for task-{}", context.getThisTaskId());
-        try {
-            this.collector = collector;
-
-            // init kv store manager
-            String storeName = String.format("task-%d", context.getThisTaskId());
-            String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-            IKvStoreManager kvStoreManager = isStatefulBolt ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, stateStorePath, isStatefulBolt) :
-                    KvStoreManagerFactory.getKvStoreManager(stormConf, storeName, stateStorePath, isStatefulBolt);
-            this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
-
-            // init time service
-            timerService = initTimerService();
-
-            // init all internal executors
-            for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-                executor.init(executorContext);
-                if (executor instanceof DoFnExecutor) {
-                    doFnExecutors.add((DoFnExecutor) executor);
-                }
-            }
-
-            this.serializer = new KryoSerializer<WindowedValue>(stormConf);
-
-            LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
-            LOG.info("inputTagToExecutor={}", inputTagToExecutor);
-            LOG.info("outputTags={}", outputTags);
-            LOG.info("externalOutputTags={}", externalOutputTags);
-            LOG.info("doFnExecutors={}", doFnExecutors);
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to prepare executors bolt", e);
+  }
+
+  public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
+    return idToDoFnExecutor;
+  }
+
+  public void addOutputTags(TupleTag tag) {
+    outputTags.add(tag);
+  }
+
+  public void addExternalOutputTag(TupleTag<?> tag) {
+    externalOutputTags.add(tag);
+  }
+
+  public Set<TupleTag> getOutputTags() {
+    return outputTags;
+  }
+
+  public ExecutorContext getExecutorContext() {
+    return executorContext;
+  }
+
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    LOG.info("Start to prepare for task-{}", context.getThisTaskId());
+    try {
+      this.collector = collector;
+
+      // init kv store manager
+      String storeName = String.format("task-%d", context.getThisTaskId());
+      String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+      IKvStoreManager kvStoreManager = isStatefulBolt ?
+          KvStoreManagerFactory.getKvStoreManagerWithMonitor(
+              context, storeName, stateStorePath, isStatefulBolt) :
+          KvStoreManagerFactory.getKvStoreManager(
+              stormConf, storeName, stateStorePath, isStatefulBolt);
+      this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
+
+      // init time service
+      timerService = initTimerService();
+
+      // init all internal executors
+      for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+        executor.init(executorContext);
+        if (executor instanceof DoFnExecutor) {
+          doFnExecutors.add((DoFnExecutor) executor);
         }
-    }
+      }
 
-    public TimerService initTimerService() {
-        TopologyContext context = executorContext.getTopologyContext();
-        List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
-                .transformAndConcat(
-                        new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
-                            @Override
-                            public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
-                                if (Common.isSystemComponent(value.getKey())) {
-                                    return Collections.EMPTY_LIST;
-                                } else {
-                                    return value.getValue();
-                                }
-                            }
-                        })
-                .toList();
-        TimerService ret = new TimerServiceImpl(executorContext);
-        ret.init(tasks);
-        return ret;
-    }
+      this.serializer = new KryoSerializer<WindowedValue>(stormConf);
 
-    @Override
-    public void execute(Tuple input) {
-        // process a batch
-        String streamId = input.getSourceStreamId();
-        ITupleExt tuple = (ITupleExt) input;
-        Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
-        if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
-            while (valueIterator.hasNext()) {
-                processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
-            }
-        } else {
-            doFnStartBundle();
-            while (valueIterator.hasNext()) {
-                processElement(valueIterator.next(), streamId);
-            }
-            doFnFinishBundle();
-        }
+      LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
+      LOG.info("inputTagToExecutor={}", inputTagToExecutor);
+      LOG.info("outputTags={}", outputTags);
+      LOG.info("externalOutputTags={}", externalOutputTags);
+      LOG.info("doFnExecutors={}", doFnExecutors);
+    } catch (IOException e) {
+      throw new RuntimeException("Failed to prepare executors bolt", e);
     }
-
-    private void processWatermark(long watermarkTs, int sourceTask) {
-        long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
-        LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
-                (new Instant(watermarkTs)).toDateTime(), sourceTask, (new Instant(newWaterMark)).toDateTime());
-        if (newWaterMark != 0) {
-            // Some buffer windows are going to be triggered.
-            doFnStartBundle();
-            timerService.fireTimers(newWaterMark);
-
-            // SideInput: If receiving water mark with max timestamp, It means no more data is supposed
-            // to be received from now on. So we are going to process all push back data.
-            if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
-                for (DoFnExecutor doFnExecutor : doFnExecutors) {
-                    doFnExecutor.processAllPushBackElements();
+  }
+
+  public TimerService initTimerService() {
+    TopologyContext context = executorContext.getTopologyContext();
+    List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
+        .transformAndConcat(
+            new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
+              @Override
+              public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
+                if (Common.isSystemComponent(value.getKey())) {
+                  return Collections.EMPTY_LIST;
+                } else {
+                  return value.getValue();
                 }
-            }
-
-            doFnFinishBundle();
-        }
-
-        long currentWaterMark = timerService.currentOutputWatermark();
-        if (!externalOutputTags.isEmpty()) {
-            collector.flush();
-            collector.emit(
-                    CommonInstance.BEAM_WATERMARK_STREAM_ID,
-                    new Values(currentWaterMark));
-            LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
-        }
-    }
-
-    private void processElement(List<Object> values, String streamId) {
-        TupleTag inputTag = new TupleTag(streamId);
-        WindowedValue windowedValue = retrieveWindowedValueFromTupleValue(values);
-        processExecutorElem(inputTag, windowedValue);
+              }
+            })
+        .toList();
+    TimerService ret = new TimerServiceImpl(executorContext);
+    ret.init(tasks);
+    return ret;
+  }
+
+  @Override
+  public void execute(Tuple input) {
+    // process a batch
+    String streamId = input.getSourceStreamId();
+    ITupleExt tuple = (ITupleExt) input;
+    Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
+    if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
+      while (valueIterator.hasNext()) {
+        processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
+      }
+    } else {
+      doFnStartBundle();
+      while (valueIterator.hasNext()) {
+        processElement(valueIterator.next(), streamId);
+      }
+      doFnFinishBundle();
     }
-
-    public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
-        LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
-        if (elem != null) {
-            Executor executor = inputTagToExecutor.get(inputTag);
-            if (executor != null) {
-                executor.process(inputTag, elem);
-            }
-            if (externalOutputTags.contains(inputTag)) {
-                emitOutsideBolt(inputTag, elem);
-            }
-        } else {
-            LOG.info("Received null elem for tag={}", inputTag);
+  }
+
+  private void processWatermark(long watermarkTs, int sourceTask) {
+    long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
+    LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
+        (new Instant(watermarkTs)).toDateTime(),
+        sourceTask,
+        (new Instant(newWaterMark)).toDateTime());
+    if (newWaterMark != 0) {
+      // Some buffer windows are going to be triggered.
+      doFnStartBundle();
+      timerService.fireTimers(newWaterMark);
+
+      // SideInput: If we receive a watermark with the max timestamp, it means no more data is
+      // supposed to be received from now on, so we process all pushed-back data.
+      if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
+        for (DoFnExecutor doFnExecutor : doFnExecutors) {
+          doFnExecutor.processAllPushBackElements();
         }
-    }
+      }
 
-    @Override
-    public void cleanup() {
-        for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-            executor.cleanup();
-        }
-        executorContext.getKvStoreManager().close();
+      doFnFinishBundle();
     }
 
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
+    long currentWaterMark = timerService.currentOutputWatermark();
+    if (!externalOutputTags.isEmpty()) {
+      collector.flush();
+      collector.emit(
+          CommonInstance.BEAM_WATERMARK_STREAM_ID,
+          new Values(currentWaterMark));
+      LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
     }
-
-    public TimerService timerService() {
-        return timerService;
+  }
+
+  private void processElement(List<Object> values, String streamId) {
+    TupleTag inputTag = new TupleTag(streamId);
+    WindowedValue windowedValue = retrieveWindowedValueFromTupleValue(values);
+    processExecutorElem(inputTag, windowedValue);
+  }
+
+  public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
+    LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
+    if (elem != null) {
+      Executor executor = inputTagToExecutor.get(inputTag);
+      if (executor != null) {
+        executor.process(inputTag, elem);
+      }
+      if (externalOutputTags.contains(inputTag)) {
+        emitOutsideBolt(inputTag, elem);
+      }
+    } else {
+      LOG.info("Received null elem for tag={}", inputTag);
     }
+  }
 
-    public void setTimerService(TimerService service) {
-        timerService = service;
+  @Override
+  public void cleanup() {
+    for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
+      executor.cleanup();
     }
-
-    private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
-        WindowedValue wv = null;
-        if (values.size() > 1) {
-            Object key = values.get(0);
-            WindowedValue value = serializer.deserialize((byte[]) values.get(1));
-            wv = value.withValue(KV.of(key, value.getValue()));
-        } else {
-            wv = serializer.deserialize((byte[])values.get(0));
-        }
-        return wv;
+    executorContext.getKvStoreManager().close();
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return null;
+  }
+
+  public TimerService timerService() {
+    return timerService;
+  }
+
+  public void setTimerService(TimerService service) {
+    timerService = service;
+  }
+
+  private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
+    WindowedValue wv = null;
+    if (values.size() > 1) {
+      Object key = values.get(0);
+      WindowedValue value = serializer.deserialize((byte[]) values.get(1));
+      wv = value.withValue(KV.of(key, value.getValue()));
+    } else {
+      wv = serializer.deserialize((byte[]) values.get(0));
     }
-
-    protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
-        LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
-        if (keyedEmit(outputTag.getId())) {
-            KV kv = (KV) outputValue.getValue();
-            byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
-            // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-            if (kv.getKey() == null) {
-                // If key is null, emit "null" string here. Because, null value will be ignored in JStorm.
-                collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
-            } else {
-                collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
-            }
-        } else {
-            byte[] immutableOutputValue = serializer.serialize(outputValue);
-            collector.emit(outputTag.getId(), new Values(immutableOutputValue));
-        }
+    return wv;
+  }
+
+  protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
+    LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
+    if (keyedEmit(outputTag.getId())) {
+      KV kv = (KV) outputValue.getValue();
+      byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
+      // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+      if (kv.getKey() == null) {
+        // If the key is null, emit the string "null" here, because null values are ignored in JStorm.
+        collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
+      } else {
+        collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
+      }
+    } else {
+      byte[] immutableOutputValue = serializer.serialize(outputValue);
+      collector.emit(outputTag.getId(), new Values(immutableOutputValue));
     }
+  }
 
-    private void doFnStartBundle() {
-        for (DoFnExecutor doFnExecutor : doFnExecutors) {
-            doFnExecutor.startBundle();
-        }
+  private void doFnStartBundle() {
+    for (DoFnExecutor doFnExecutor : doFnExecutors) {
+      doFnExecutor.startBundle();
     }
+  }
 
-    private void doFnFinishBundle() {
-        for (DoFnExecutor doFnExecutor : doFnExecutors) {
-            doFnExecutor.finishBundle();
-        }
+  private void doFnFinishBundle() {
+    for (DoFnExecutor doFnExecutor : doFnExecutors) {
+      doFnExecutor.finishBundle();
     }
+  }
 
-    @Override
-    public String toString() {
-        // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
-        List<String> ret = new ArrayList<>();
+  @Override
+  public String toString() {
+    // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
+    List<String> ret = new ArrayList<>();
         /*ret.add("inputTags");
         for (TupleTag inputTag : inputTagToExecutor.keySet()) {
             ret.add(inputTag.getId());
         }*/
-        ret.add("internalExecutors");
-        for (Executor executor : inputTagToExecutor.values()) {
-            ret.add(executor.toString());
-        }
-        ret.add("externalOutputTags");
-        for (TupleTag output : externalOutputTags) {
-            ret.add(output.getId());
-        }
-        return Joiner.on('\n').join(ret).concat("\n");
+    ret.add("internalExecutors");
+    for (Executor executor : inputTagToExecutor.values()) {
+      ret.add(executor.toString());
+    }
+    ret.add("externalOutputTags");
+    for (TupleTag output : externalOutputTags) {
+      ret.add(output.getId());
     }
+    return Joiner.on('\n').join(ret).concat("\n");
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
index 1ef28c9..5a07243 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
@@ -17,39 +17,40 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 public class FlattenExecutor<InputT> implements Executor {
 
-    private final String description;
-    private TupleTag mainOutputTag;
-    private ExecutorContext context;
-    private ExecutorsBolt executorsBolt;
-
-    public FlattenExecutor(String description, TupleTag mainTupleTag) {
-        this.description = checkNotNull(description, "description");
-        this.mainOutputTag = mainTupleTag;
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.context = context;
-        this.executorsBolt = context.getExecutorsBolt();
-    }
-
-    @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        executorsBolt.processExecutorElem(mainOutputTag, elem);
-    }
-
-    @Override
-    public void cleanup() {}
-
-    @Override
-    public String toString() {
-        return description;
-    }
+  private final String description;
+  private TupleTag mainOutputTag;
+  private ExecutorContext context;
+  private ExecutorsBolt executorsBolt;
+
+  public FlattenExecutor(String description, TupleTag mainTupleTag) {
+    this.description = checkNotNull(description, "description");
+    this.mainOutputTag = mainTupleTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.context = context;
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    executorsBolt.processExecutorElem(mainOutputTag, elem);
+  }
+
+  @Override
+  public void cleanup() {
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
index 299ceb2..625726d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
@@ -17,18 +17,17 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
-import java.io.Serializable;
-import java.util.List;
+import static com.google.common.base.Preconditions.checkArgument;
 
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
 import com.google.common.collect.ImmutableList;
+import java.io.Serializable;
+import java.util.List;
 import org.apache.beam.runners.core.DoFnRunner;
 import org.apache.beam.runners.core.DoFnRunners;
 import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetNewDoFn;
 import org.apache.beam.runners.core.KeyedWorkItem;
 import org.apache.beam.runners.core.KeyedWorkItems;
+import org.apache.beam.runners.core.NullSideInputReader;
 import org.apache.beam.runners.core.StateInternals;
 import org.apache.beam.runners.core.StateInternalsFactory;
 import org.apache.beam.runners.core.StateNamespace;
@@ -36,122 +35,138 @@ import org.apache.beam.runners.core.StateNamespaces;
 import org.apache.beam.runners.core.SystemReduceFn;
 import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.runners.core.TimerInternalsFactory;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.runners.jstorm.translation.TranslationContext;
+import org.apache.beam.runners.jstorm.translation.TranslationContext.UserGraphContext;
+import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
+import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
+import org.apache.beam.runners.jstorm.util.RunnerUtils;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.runners.core.NullSideInputReader;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TupleTag;
-
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.TranslationContext.UserGraphContext;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
+import org.apache.beam.sdk.values.WindowingStrategy;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
-public class GroupByWindowExecutor<K, V> extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
-    private static final long serialVersionUID = -7563050475488610553L;
-
-    private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
-
-    private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
-
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            executorsBolt.processExecutorElem(tag, output);
-        }
-    }
-
-    private KvCoder<K, V> inputKvCoder;
-    private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
-
-    public GroupByWindowExecutor(
-            String stepName,
-            String description,
-            TranslationContext context,
-            JStormPipelineOptions pipelineOptions,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-        // The doFn will be created when runtime. Just pass "null" here
-        super(stepName, description, pipelineOptions, null, null, windowingStrategy, null, null, null, mainTupleTag, sideOutputTags);
-
-        this.outputManager = new GroupByWindowOutputManager();
-        UserGraphContext userGraphContext = context.getUserGraphContext();
-        PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-        this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
-    }
-
-    private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
-        final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
-            @Override
-            public StateInternals stateInternalsForKey(K key) {
-                return new JStormStateInternals<K>(key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-            }
-        };
-        TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
-            @Override
-            public TimerInternals timerInternalsForKey(K key) {
-                return new JStormTimerInternals<>(key, GroupByWindowExecutor.this, executorContext.getExecutorsBolt().timerService());
-            }
-        };
-
-        reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
-        DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
-              GroupAlsoByWindowViaWindowSetNewDoFn.create(
-                  windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
-                      (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
-        return doFn;
-    }
-
-    @Override
-    protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
-        doFn = getGroupByWindowDoFn();
-
-        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.<KeyedWorkItem<K, V>, KV<K, Iterable<V>>>simpleRunner(
-                this.pipelineOptions,
-                this.doFn,
-                NullSideInputReader.empty(),
-                this.outputManager,
-                this.mainTupleTag,
-                this.sideOutputTags,
-                this.stepContext,
-                this.windowingStrategy);
-
-        DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner = DoFnRunners.lateDataDroppingRunner(
-                simpleRunner,
-                this.stepContext,
-                this.windowingStrategy);
-        return new DoFnRunnerWithMetrics<>(
-            stepName, doFnRunner, MetricsReporter.create(metricClient));
-    }
-
-    @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        /**
-         *  For GroupByKey, KV type elem is received. We need to convert the KV elem
-         *  into KeyedWorkItem first, which is the expected type in LateDataDroppingDoFnRunner.
-         */
-        KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
-        runner.processElement(elem.withValue(keyedWorkItem));
-    }
+public class GroupByWindowExecutor<K, V>
+    extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
+  private static final long serialVersionUID = -7563050475488610553L;
 
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        StateNamespace namespace = timerData.getNamespace();
-        checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+  private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
 
-        runner.processElement(
-                WindowedValue.valueInGlobalWindow(
-                        KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
-    }
+  private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
 
     @Override
-    public String toString() {
-        return super.toString();
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      executorsBolt.processExecutorElem(tag, output);
     }
+  }
+
+  private KvCoder<K, V> inputKvCoder;
+  private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
+
+  public GroupByWindowExecutor(
+      String stepName,
+      String description,
+      TranslationContext context,
+      JStormPipelineOptions pipelineOptions,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+    // The doFn will be created at runtime, so just pass "null" here.
+    super(
+        stepName,
+        description,
+        pipelineOptions,
+        null,
+        null,
+        windowingStrategy,
+        null,
+        null,
+        null,
+        mainTupleTag,
+        sideOutputTags);
+
+    this.outputManager = new GroupByWindowOutputManager();
+    UserGraphContext userGraphContext = context.getUserGraphContext();
+    PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+    this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
+  }
+
+  private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
+    final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
+      @Override
+      public StateInternals stateInternalsForKey(K key) {
+        return new JStormStateInternals<K>(
+            key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
+      }
+    };
+    TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
+      @Override
+      public TimerInternals timerInternalsForKey(K key) {
+        return new JStormTimerInternals<>(
+            key,
+            GroupByWindowExecutor.this,
+            executorContext.getExecutorsBolt().timerService());
+      }
+    };
+
+    reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
+    DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
+        GroupAlsoByWindowViaWindowSetNewDoFn.create(
+            windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
+            (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
+    return doFn;
+  }
+
+  @Override
+  protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
+    doFn = getGroupByWindowDoFn();
+
+    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.simpleRunner(
+        this.pipelineOptions,
+        this.doFn,
+        NullSideInputReader.empty(),
+        this.outputManager,
+        this.mainTupleTag,
+        this.sideOutputTags,
+        this.stepContext,
+        this.windowingStrategy);
+
+    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner =
+        DoFnRunners.lateDataDroppingRunner(
+            simpleRunner,
+            this.stepContext,
+            this.windowingStrategy);
+    return new DoFnRunnerWithMetrics<>(
+        stepName, doFnRunner, MetricsReporter.create(metricClient));
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    /**
+     *  For GroupByKey, a KV-typed elem is received. We need to convert the KV elem
+     *  into a KeyedWorkItem first, which is the type expected by LateDataDroppingDoFnRunner.
+     */
+    KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
+    runner.processElement(elem.withValue(keyedWorkItem));
+  }
+
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    StateNamespace namespace = timerData.getNamespace();
+    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
+
+    runner.processElement(
+        WindowedValue.valueInGlobalWindow(
+            KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
+  }
+
+  @Override
+  public String toString() {
+    return super.toString();
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
index cb15ea2..d36d9a6 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiOutputDoFnExecutor.java
@@ -17,6 +17,9 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
@@ -27,49 +30,45 @@ import org.apache.beam.sdk.values.WindowingStrategy;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
 public class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, OutputT> {
-    private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultiOutputDoFnExecutor.class);
 
-    /**
-     * For multi-output scenario,a "local" tuple tag is used in producer currently while a generated tag
-     * is used in downstream consumer. So before output, we need to map this "local" tag to "external"
-     * tag. See PCollectionTuple for details.
-     */
-    public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
-        @Override
-        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-            if (localTupleTagMap.containsKey(tag)) {
-                executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
-            } else {
-                executorsBolt.processExecutorElem(tag, output);
-            }
-        }
+  /**
+   * In the multi-output scenario, a "local" tuple tag is currently used in the producer while a
+   * generated tag is used in the downstream consumer. So before output, we need to map this
+   * "local" tag to the "external" tag. See PCollectionTuple for details.
+   */
+  public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
+    @Override
+    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
+      if (localTupleTagMap.containsKey(tag)) {
+        executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
+      } else {
+        executorsBolt.processExecutorElem(tag, output);
+      }
     }
+  }
 
-    protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
+  protected Map<TupleTag<?>, TupleTag<?>> localTupleTagMap;
 
-    public MultiOutputDoFnExecutor(
-            String stepName,
-            String description,
-            JStormPipelineOptions pipelineOptions,
-            DoFn<InputT, OutputT> doFn,
-            Coder<WindowedValue<InputT>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy,
-            TupleTag<InputT> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs,
-            Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-            TupleTag<OutputT> mainTupleTag,
-            List<TupleTag<?>> sideOutputTags,
-            Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
-            ) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
-                sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-        this.localTupleTagMap = localTupleTagMap;
-        this.outputManager = new MultiOutputDoFnExecutorOutputManager();
-        LOG.info("localTupleTagMap: {}", localTupleTagMap);
-    }
+  public MultiOutputDoFnExecutor(
+      String stepName,
+      String description,
+      JStormPipelineOptions pipelineOptions,
+      DoFn<InputT, OutputT> doFn,
+      Coder<WindowedValue<InputT>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<InputT> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
+      TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags,
+      Map<TupleTag<?>, TupleTag<?>> localTupleTagMap
+  ) {
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
+        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+    this.localTupleTagMap = localTupleTagMap;
+    this.outputManager = new MultiOutputDoFnExecutorOutputManager();
+    LOG.info("localTupleTagMap: {}", localTupleTagMap);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
index dd7921f..45ac62a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MultiStatefulDoFnExecutor.java
@@ -17,10 +17,13 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
 import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -29,40 +32,37 @@ import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
 public class MultiStatefulDoFnExecutor<OutputT> extends MultiOutputDoFnExecutor<KV, OutputT> {
 
-    public MultiStatefulDoFnExecutor(
-        String stepName, String description,
-        JStormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
-        Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
-        TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
-        Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
-        List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
-    }
+  public MultiStatefulDoFnExecutor(
+      String stepName, String description,
+      JStormPipelineOptions pipelineOptions, DoFn<KV, OutputT> doFn,
+      Coder<WindowedValue<KV>> inputCoder, WindowingStrategy<?, ?> windowingStrategy,
+      TupleTag<KV> mainInputTag, Collection<PCollectionView<?>> sideInputs,
+      Map<TupleTag, PCollectionView<?>> sideInputTagToView, TupleTag<OutputT> mainTupleTag,
+      List<TupleTag<?>> sideOutputTags, Map<TupleTag<?>, TupleTag<?>> localTupleTagMap) {
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy, mainInputTag,
+        sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags, localTupleTagMap);
+  }
 
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        if (mainInputTag.equals(tag)) {
-            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-                    executorContext.getExecutorsBolt().timerService()));
-            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    if (mainInputTag.equals(tag)) {
+      WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+      stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
+          executorContext.getExecutorsBolt().timerService()));
+      stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
+          kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+      processMainInput(elem);
+    } else {
+      processSideInput(tag, elem);
     }
+  }
 
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        stepContext.setStateInternals(new JStormStateInternals<>(key,
-                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        super.onTimer(key, timerData);
-    }
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    stepContext.setStateInternals(new JStormStateInternals<>(key,
+        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    super.onTimer(key, timerData);
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
index 7d20a4c..ba0c052 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/StatefulDoFnExecutor.java
@@ -17,10 +17,13 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
 import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -29,39 +32,35 @@ import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
 
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
 public class StatefulDoFnExecutor<OutputT> extends DoFnExecutor<KV, OutputT> {
-    public StatefulDoFnExecutor(
-            String stepName, String description, JStormPipelineOptions pipelineOptions,
-            DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
-            WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
-            Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
-                    sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-        super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
-                mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
-    }
+  public StatefulDoFnExecutor(
+      String stepName, String description, JStormPipelineOptions pipelineOptions,
+      DoFn<KV, OutputT> doFn, Coder<WindowedValue<KV>> inputCoder,
+      WindowingStrategy<?, ?> windowingStrategy, TupleTag<KV> mainInputTag,
+      Collection<PCollectionView<?>> sideInputs, Map<TupleTag, PCollectionView<?>>
+          sideInputTagToView, TupleTag<OutputT> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
+    super(stepName, description, pipelineOptions, doFn, inputCoder, windowingStrategy,
+        mainInputTag, sideInputs, sideInputTagToView, mainTupleTag, sideOutputTags);
+  }
 
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        if (mainInputTag.equals(tag)) {
-            WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
-            stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
-                    executorContext.getExecutorsBolt().timerService()));
-            stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
-                    kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-            processMainInput(elem);
-        } else {
-            processSideInput(tag, elem);
-        }
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    if (mainInputTag.equals(tag)) {
+      WindowedValue<KV> kvElem = (WindowedValue<KV>) elem;
+      stepContext.setTimerInternals(new JStormTimerInternals(kvElem.getValue().getKey(), this,
+          executorContext.getExecutorsBolt().timerService()));
+      stepContext.setStateInternals(new JStormStateInternals<>(kvElem.getValue().getKey(),
+          kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+      processMainInput(elem);
+    } else {
+      processSideInput(tag, elem);
     }
+  }
 
-    @Override
-    public void onTimer(Object key, TimerInternals.TimerData timerData) {
-        stepContext.setStateInternals(new JStormStateInternals<>(key,
-                kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-        super.onTimer(key, timerData);
-    }
+  @Override
+  public void onTimer(Object key, TimerInternals.TimerData timerData) {
+    stepContext.setStateInternals(new JStormStateInternals<>(key,
+        kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
+    super.onTimer(key, timerData);
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
index 47db018..5c41bda 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerService.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,36 +17,35 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
-import org.apache.beam.runners.core.TimerInternals;
-import org.joda.time.Instant;
-
 import java.io.Serializable;
 import java.util.List;
+import org.apache.beam.runners.core.TimerInternals;
+import org.joda.time.Instant;
 
 /**
  * Interface that tracks input watermarks and manages timers in each bolt.
  */
 public interface TimerService extends Serializable {
 
-    void init(List<Integer> upStreamTasks);
+  void init(List<Integer> upStreamTasks);
 
-    /**
-     *
-     * @param task
-     * @param inputWatermark
-     * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
-     */
-    long updateInputWatermark(Integer task, long inputWatermark);
+  /**
+   *
+   * @param task
+   * @param inputWatermark
+   * @return new watermark if any timer is triggered during the update of watermark, otherwise 0
+   */
+  long updateInputWatermark(Integer task, long inputWatermark);
 
-    long currentInputWatermark();
+  long currentInputWatermark();
 
-    long currentOutputWatermark();
+  long currentOutputWatermark();
 
-    void clearWatermarkHold(String namespace);
+  void clearWatermarkHold(String namespace);
 
-    void addWatermarkHold(String namespace, Instant watermarkHold);
+  void addWatermarkHold(String namespace, Instant watermarkHold);
 
-    void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
+  void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor);
 
-    void fireTimers(long newWatermark);
+  void fireTimers(long newWatermark);
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
index 3b864d5..d2514f1 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TimerServiceImpl.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,134 +17,139 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
 import avro.shaded.com.google.common.collect.Maps;
 import avro.shaded.com.google.common.collect.Sets;
 import com.alibaba.jstorm.utils.Pair;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.state.TimeDomain;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.joda.time.Instant;
 
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-
 /**
  * Default implementation of {@link TimerService}.
  */
 public class TimerServiceImpl implements TimerService {
-    private transient ExecutorContext executorContext;
-    private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
-
-    private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark = new ConcurrentHashMap<>();
-    private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
-    private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
-    private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
-    private transient final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue = new PriorityQueue<>();
-    private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
-            timerDataToKeyedExecutors = Maps.newHashMap();
-
-    private boolean initialized = false;
-
-    public TimerServiceImpl() {
-    }
-
-    public TimerServiceImpl(ExecutorContext executorContext) {
-        this.executorContext = executorContext;
-        this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
-    }
-
-    @Override
-    public void init(List<Integer> upStreamTasks) {
-        for (Integer task : upStreamTasks) {
-            upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-            inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
-        }
-        initialized = true;
-    }
-
-    @Override
-    public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
-        checkState(initialized, "TimerService has not been initialized.");
-        Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
-        // Make sure the input watermark don't go backward.
-        if (taskInputWatermark > oldTaskInputWatermark) {
-            upStreamTaskToInputWatermark.put(task, taskInputWatermark);
-            inputWatermarks.add(taskInputWatermark);
-            inputWatermarks.remove(oldTaskInputWatermark);
-
-            long newLocalInputWatermark = currentInputWatermark();
-            if (newLocalInputWatermark > oldTaskInputWatermark) {
-                return newLocalInputWatermark;
-            }
-        }
-        return 0;
+  private transient ExecutorContext executorContext;
+  private transient Map<Integer, DoFnExecutor> idToDoFnExecutor;
+
+  private final ConcurrentMap<Integer, Long> upStreamTaskToInputWatermark =
+      new ConcurrentHashMap<>();
+  private final PriorityQueue<Long> inputWatermarks = new PriorityQueue<>();
+  private final PriorityQueue<Instant> watermarkHolds = new PriorityQueue<>();
+  private final Map<String, Instant> namespaceToWatermarkHold = new HashMap<>();
+  private transient final PriorityQueue<TimerInternals.TimerData> eventTimeTimersQueue =
+      new PriorityQueue<>();
+  private final Map<TimerInternals.TimerData, Set<Pair<Integer, Object>>>
+      timerDataToKeyedExecutors = Maps.newHashMap();
+
+  private boolean initialized = false;
+
+  public TimerServiceImpl() {
+  }
+
+  public TimerServiceImpl(ExecutorContext executorContext) {
+    this.executorContext = executorContext;
+    this.idToDoFnExecutor = executorContext.getExecutorsBolt().getIdToDoFnExecutor();
+  }
+
+  @Override
+  public void init(List<Integer> upStreamTasks) {
+    for (Integer task : upStreamTasks) {
+      upStreamTaskToInputWatermark.put(task, BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
+      inputWatermarks.add(BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis());
     }
-
-    @Override
-    public void fireTimers(long newWatermark) {
-        TimerInternals.TimerData timerData;
-        while ((timerData = eventTimeTimersQueue.peek()) != null
-                && timerData.getTimestamp().getMillis() <= newWatermark) {
-            for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
-                DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
-                executor.onTimer(keyedExecutor.getSecond(), timerData);
-            }
-            eventTimeTimersQueue.remove();
-            timerDataToKeyedExecutors.remove(timerData);
-        }
+    initialized = true;
+  }
+
+  @Override
+  public synchronized long updateInputWatermark(Integer task, long taskInputWatermark) {
+    checkState(initialized, "TimerService has not been initialized.");
+    Long oldTaskInputWatermark = upStreamTaskToInputWatermark.get(task);
+    // Make sure the input watermark don't go backward.
+    if (taskInputWatermark > oldTaskInputWatermark) {
+      upStreamTaskToInputWatermark.put(task, taskInputWatermark);
+      inputWatermarks.add(taskInputWatermark);
+      inputWatermarks.remove(oldTaskInputWatermark);
+
+      long newLocalInputWatermark = currentInputWatermark();
+      if (newLocalInputWatermark > oldTaskInputWatermark) {
+        return newLocalInputWatermark;
+      }
     }
-
-    @Override
-    public long currentInputWatermark() {
-        return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
+    return 0;
+  }
+
+  @Override
+  public void fireTimers(long newWatermark) {
+    TimerInternals.TimerData timerData;
+    while ((timerData = eventTimeTimersQueue.peek()) != null
+        && timerData.getTimestamp().getMillis() <= newWatermark) {
+      for (Pair<Integer, Object> keyedExecutor : timerDataToKeyedExecutors.get(timerData)) {
+        DoFnExecutor executor = idToDoFnExecutor.get(keyedExecutor.getFirst());
+        executor.onTimer(keyedExecutor.getSecond(), timerData);
+      }
+      eventTimeTimersQueue.remove();
+      timerDataToKeyedExecutors.remove(timerData);
     }
-
-    @Override
-    public long currentOutputWatermark() {
-        if (watermarkHolds.isEmpty()) {
-            return currentInputWatermark();
-        } else {
-            return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
-        }
+  }
+
+  @Override
+  public long currentInputWatermark() {
+    return initialized ? inputWatermarks.peek() : BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
+  }
+
+  @Override
+  public long currentOutputWatermark() {
+    if (watermarkHolds.isEmpty()) {
+      return currentInputWatermark();
+    } else {
+      return Math.min(currentInputWatermark(), watermarkHolds.peek().getMillis());
     }
-
-    @Override
-    public void clearWatermarkHold(String namespace) {
-        Instant currentHold = namespaceToWatermarkHold.get(namespace);
-        if (currentHold != null) {
-            watermarkHolds.remove(currentHold);
-            namespaceToWatermarkHold.remove(namespace);
-        }
+  }
+
+  @Override
+  public void clearWatermarkHold(String namespace) {
+    Instant currentHold = namespaceToWatermarkHold.get(namespace);
+    if (currentHold != null) {
+      watermarkHolds.remove(currentHold);
+      namespaceToWatermarkHold.remove(namespace);
     }
-
-    @Override
-    public void addWatermarkHold(String namespace, Instant watermarkHold) {
-        Instant currentHold = namespaceToWatermarkHold.get(namespace);
-        if (currentHold == null) {
-            namespaceToWatermarkHold.put(namespace, watermarkHold);
-            watermarkHolds.add(watermarkHold);
-        } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
-            namespaceToWatermarkHold.put(namespace, watermarkHold);
-            watermarkHolds.add(watermarkHold);
-            watermarkHolds.remove(currentHold);
-        }
+  }
+
+  @Override
+  public void addWatermarkHold(String namespace, Instant watermarkHold) {
+    Instant currentHold = namespaceToWatermarkHold.get(namespace);
+    if (currentHold == null) {
+      namespaceToWatermarkHold.put(namespace, watermarkHold);
+      watermarkHolds.add(watermarkHold);
+    } else if (currentHold != null && watermarkHold.isBefore(currentHold)) {
+      namespaceToWatermarkHold.put(namespace, watermarkHold);
+      watermarkHolds.add(watermarkHold);
+      watermarkHolds.remove(currentHold);
     }
-
-    @Override
-    public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
-        checkArgument(
-                TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
-                String.format("Does not support domain: %s.", timerData.getDomain()));
-        Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
-        if (keyedExecutors == null) {
-            keyedExecutors = Sets.newHashSet();
-            eventTimeTimersQueue.add(timerData);
-        }
-        keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
-        timerDataToKeyedExecutors.put(timerData, keyedExecutors);
+  }
+
+  @Override
+  public void setTimer(Object key, TimerInternals.TimerData timerData, DoFnExecutor doFnExecutor) {
+    checkArgument(
+        TimeDomain.EVENT_TIME.equals(timerData.getDomain()),
+        String.format("Does not support domain: %s.", timerData.getDomain()));
+    Set<Pair<Integer, Object>> keyedExecutors = timerDataToKeyedExecutors.get(timerData);
+    if (keyedExecutors == null) {
+      keyedExecutors = Sets.newHashSet();
+      eventTimeTimersQueue.add(timerData);
     }
+    keyedExecutors.add(new Pair<>(doFnExecutor.getInternalDoFnExecutorId(), key));
+    timerDataToKeyedExecutors.put(timerData, keyedExecutors);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
index 0fb88ab..2bd5f7d 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxExecutorsBolt.java
@@ -24,108 +24,107 @@ import backtype.storm.tuple.Tuple;
 import com.alibaba.jstorm.cache.IKvStore;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.transactional.bolt.ITransactionStatefulBoltExecutor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TxExecutorsBolt implements ITransactionStatefulBoltExecutor {
-    private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
-
-    private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
-    private static final String TIMER_SERVICE_KET = "timer_service_key";
-
-    private ExecutorsBolt executorsBolt;
-    private IKvStoreManager kvStoreManager;
-    private IKvStore<String, TimerService> timerServiceStore;
-
-    public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
-        this.executorsBolt = executorsBolt;
-        this.executorsBolt.setStatefulBolt(true);
-    }
-
-    @Override
-    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-        try {
-            executorsBolt.prepare(stormConf, context, collector);
-            kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
-            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-        } catch (IOException e) {
-            LOG.error("Failed to prepare stateful bolt", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void execute(Tuple input) {
-        executorsBolt.execute(input);
-    }
-
-    @Override
-    public void cleanup() {
-        executorsBolt.cleanup();
+  private static final Logger LOG = LoggerFactory.getLogger(TxExecutorsBolt.class);
+
+  private static final String TIME_SERVICE_STORE_ID = "timer_service_store";
+  private static final String TIMER_SERVICE_KET = "timer_service_key";
+
+  private ExecutorsBolt executorsBolt;
+  private IKvStoreManager kvStoreManager;
+  private IKvStore<String, TimerService> timerServiceStore;
+
+  public TxExecutorsBolt(ExecutorsBolt executorsBolt) {
+    this.executorsBolt = executorsBolt;
+    this.executorsBolt.setStatefulBolt(true);
+  }
+
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    try {
+      executorsBolt.prepare(stormConf, context, collector);
+      kvStoreManager = executorsBolt.getExecutorContext().getKvStoreManager();
+      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+    } catch (IOException e) {
+      LOG.error("Failed to prepare stateful bolt", e);
+      throw new RuntimeException(e.getMessage());
     }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        executorsBolt.declareOutputFields(declarer);
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return executorsBolt.getComponentConfiguration();
-    }
-
-    @Override
-    public void initState(Object userState) {
-        LOG.info("Begin to init from state: {}", userState);
-        restore(userState);
-    }
-
-    @Override
-    public Object finishBatch(long batchId) {
-        try {
-            timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
-        } catch (IOException e) {
-            LOG.error("Failed to store current timer service status", e);
-            throw new RuntimeException(e.getMessage());
-        }
-        kvStoreManager.checkpoint(batchId);
-        return null;
+  }
+
+  @Override
+  public void execute(Tuple input) {
+    executorsBolt.execute(input);
+  }
+
+  @Override
+  public void cleanup() {
+    executorsBolt.cleanup();
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    executorsBolt.declareOutputFields(declarer);
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return executorsBolt.getComponentConfiguration();
+  }
+
+  @Override
+  public void initState(Object userState) {
+    LOG.info("Begin to init from state: {}", userState);
+    restore(userState);
+  }
+
+  @Override
+  public Object finishBatch(long batchId) {
+    try {
+      timerServiceStore.put(TIMER_SERVICE_KET, executorsBolt.timerService());
+    } catch (IOException e) {
+      LOG.error("Failed to store current timer service status", e);
+      throw new RuntimeException(e.getMessage());
     }
-
-    @Override
-    public Object commit(long batchId, Object state) {
-        return kvStoreManager.backup(batchId);
-    }
-
-    @Override
-    public void rollBack(Object userState) {
-        LOG.info("Begin to rollback from state: {}", userState);
-        restore(userState);
-    }
-
-    @Override
-    public void ackCommit(long batchId, long timeStamp) {
-        kvStoreManager.remove(batchId);
-    }
-
-    private void restore(Object userState) {
-        try {
-            // restore all states
-            kvStoreManager.restore(userState);
-
-            // init timer service
-            timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
-            TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
-            if (timerService == null) {
-                timerService = executorsBolt.initTimerService();
-            }
-            executorsBolt.setTimerService(timerService);
-        } catch (IOException e) {
-            LOG.error("Failed to restore state", e);
-            throw new RuntimeException(e.getMessage());
-        }
+    kvStoreManager.checkpoint(batchId);
+    return null;
+  }
+
+  @Override
+  public Object commit(long batchId, Object state) {
+    return kvStoreManager.backup(batchId);
+  }
+
+  @Override
+  public void rollBack(Object userState) {
+    LOG.info("Begin to rollback from state: {}", userState);
+    restore(userState);
+  }
+
+  @Override
+  public void ackCommit(long batchId, long timeStamp) {
+    kvStoreManager.remove(batchId);
+  }
+
+  private void restore(Object userState) {
+    try {
+      // restore all states
+      kvStoreManager.restore(userState);
+
+      // init timer service
+      timerServiceStore = kvStoreManager.getOrCreate(TIME_SERVICE_STORE_ID);
+      TimerService timerService = timerServiceStore.get(TIMER_SERVICE_KET);
+      if (timerService == null) {
+        timerService = executorsBolt.initTimerService();
+      }
+      executorsBolt.setTimerService(timerService);
+    } catch (IOException e) {
+      LOG.error("Failed to restore state", e);
+      throw new RuntimeException(e.getMessage());
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
index 22dd07b..16f7d99 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/TxUnboundedSourceSpout.java
@@ -24,130 +24,130 @@ import com.alibaba.jstorm.cache.IKvStore;
 import com.alibaba.jstorm.cache.IKvStoreManager;
 import com.alibaba.jstorm.cache.KvStoreManagerFactory;
 import com.alibaba.jstorm.transactional.spout.ITransactionSpoutExecutor;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.util.Map;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.slf4j.LoggerFactory;
 
 public class TxUnboundedSourceSpout implements ITransactionSpoutExecutor {
-    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
-
-    private static final String SOURCE_STORE_ID = "SourceCheckpoint";
-    private static final String CHECKPOINT_MARK = "CheckpointMark";
-
-    private UnboundedSourceSpout sourceSpout;
-    private UnboundedSource.UnboundedReader reader;
-    private IKvStoreManager kvStoreManager;
-    private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
-
-    public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
-        this.sourceSpout = sourceSpout;
-    }
-
-    private void restore(Object userState) {
-        try {
-            kvStoreManager.restore(userState);
-            sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
-            UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
-            sourceSpout.createSourceReader(checkpointMark);
-            reader = sourceSpout.getUnboundedSourceReader();
-        } catch (IOException e) {
-            LOG.error("Failed to init state", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void initState(Object userState) {
-        restore(userState);
-    }
-
-    @Override
-    public Object finishBatch(long checkpointId) {
-        try {
-            // Store check point mark from unbounded source reader
-            UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
-            sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
-
-            // checkpoint all kv stores in current manager
-            kvStoreManager.checkpoint(checkpointId);
-        } catch (IOException e) {
-            LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
-            throw new RuntimeException(e.getMessage());
-        }
-        return null;
-    }
-
-    @Override
-    public Object commit(long batchId, Object state) {
-        // backup kv stores to remote state backend
-        return kvStoreManager.backup(batchId);
-    }
-
-    @Override
-    public void rollBack(Object userState) {
-        restore(userState);
-    }
-
-    @Override
-    public void ackCommit(long batchId, long timeStamp) {
-        // remove obsolete state in bolt local and remote state backend
-        kvStoreManager.remove(batchId);
-    }
-
-    @Override
-    public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        sourceSpout.declareOutputFields(declarer);
-    }
-
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return sourceSpout.getComponentConfiguration();
-    }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        try {
-            sourceSpout.open(conf, context, collector);
-            String storeName = String.format("task-%s", context.getThisTaskId());
-            String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-            kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(context, storeName, storePath, true);
-
-            reader = sourceSpout.getUnboundedSourceReader();
-        } catch (IOException e) {
-            LOG.error("Failed to open transactional unbounded source spout", e);
-            throw new RuntimeException(e.getMessage());
-        }
-    }
-
-    @Override
-    public void close() {
-        sourceSpout.close();
-    }
-
-    @Override
-    public void activate() {
-        sourceSpout.activate();
-    }
-
-    @Override
-    public void deactivate() {
-        sourceSpout.deactivate();
-    }
-
-    @Override
-    public void nextTuple() {
-        sourceSpout.nextTuple();
-    }
-
-    @Override
-    public void ack(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void fail(Object msgId) {
-        throw new UnsupportedOperationException();
-    }
+  private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(TxUnboundedSourceSpout.class);
+
+  private static final String SOURCE_STORE_ID = "SourceCheckpoint";
+  private static final String CHECKPOINT_MARK = "CheckpointMark";
+
+  private UnboundedSourceSpout sourceSpout;
+  private UnboundedSource.UnboundedReader reader;
+  private IKvStoreManager kvStoreManager;
+  private IKvStore<String, UnboundedSource.CheckpointMark> sourceCheckpointStore;
+
+  public TxUnboundedSourceSpout(UnboundedSourceSpout sourceSpout) {
+    this.sourceSpout = sourceSpout;
+  }
+
+  private void restore(Object userState) {
+    try {
+      kvStoreManager.restore(userState);
+      sourceCheckpointStore = kvStoreManager.getOrCreate(SOURCE_STORE_ID);
+      UnboundedSource.CheckpointMark checkpointMark = sourceCheckpointStore.get(CHECKPOINT_MARK);
+      sourceSpout.createSourceReader(checkpointMark);
+      reader = sourceSpout.getUnboundedSourceReader();
+    } catch (IOException e) {
+      LOG.error("Failed to init state", e);
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void initState(Object userState) {
+    restore(userState);
+  }
+
+  @Override
+  public Object finishBatch(long checkpointId) {
+    try {
+      // Store check point mark from unbounded source reader
+      UnboundedSource.CheckpointMark checkpointMark = reader.getCheckpointMark();
+      sourceCheckpointStore.put(CHECKPOINT_MARK, checkpointMark);
+
+      // checkpoint all kv stores in current manager
+      kvStoreManager.checkpoint(checkpointId);
+    } catch (IOException e) {
+      LOG.error(String.format("Failed to finish batch-%s", checkpointId), e);
+      throw new RuntimeException(e.getMessage());
+    }
+    return null;
+  }
+
+  @Override
+  public Object commit(long batchId, Object state) {
+    // backup kv stores to remote state backend
+    return kvStoreManager.backup(batchId);
+  }
+
+  @Override
+  public void rollBack(Object userState) {
+    restore(userState);
+  }
+
+  @Override
+  public void ackCommit(long batchId, long timeStamp) {
+    // remove obsolete state in bolt local and remote state backend
+    kvStoreManager.remove(batchId);
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    sourceSpout.declareOutputFields(declarer);
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return sourceSpout.getComponentConfiguration();
+  }
+
+  @Override
+  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+    try {
+      sourceSpout.open(conf, context, collector);
+      String storeName = String.format("task-%s", context.getThisTaskId());
+      String storePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
+      kvStoreManager = KvStoreManagerFactory.getKvStoreManagerWithMonitor(
+          context, storeName, storePath, true);
+
+      reader = sourceSpout.getUnboundedSourceReader();
+    } catch (IOException e) {
+      LOG.error("Failed to open transactional unbounded source spout", e);
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void close() {
+    sourceSpout.close();
+  }
+
+  @Override
+  public void activate() {
+    sourceSpout.activate();
+  }
+
+  @Override
+  public void deactivate() {
+    sourceSpout.deactivate();
+  }
+
+  @Override
+  public void nextTuple() {
+    sourceSpout.nextTuple();
+  }
+
+  @Override
+  public void ack(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void fail(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
 }
\ No newline at end of file


[42/53] [abbrv] beam git commit: jstorm-runner: Support multiple copies of Flatten

Posted by pe...@apache.org.
jstorm-runner: Support multiple copies of Flatten


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/1178f9fb
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/1178f9fb
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/1178f9fb

Branch: refs/heads/jstorm-runner
Commit: 1178f9fb957c7e6cf1b277696ff63dc0e29a6d5e
Parents: 52913b7
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Thu Jul 20 20:04:24 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:03:00 2017 +0800

----------------------------------------------------------------------
 .../runners/jstorm/translation/FlattenExecutor.java  | 12 ++++++++++--
 .../jstorm/translation/FlattenTranslator.java        | 15 +++++++++++++--
 2 files changed, 23 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/1178f9fb/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
index a64f494..928fa24 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenExecutor.java
@@ -19,6 +19,8 @@ package org.apache.beam.runners.jstorm.translation;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
+import java.util.Map;
+
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
@@ -32,8 +34,11 @@ class FlattenExecutor<InputT> implements Executor {
   private TupleTag mainOutputTag;
   private ExecutorContext context;
   private ExecutorsBolt executorsBolt;
+  private final Map<TupleTag, Integer> tagToCopyNum;
 
-  public FlattenExecutor(String description, TupleTag mainTupleTag) {
+  public FlattenExecutor(String description, TupleTag mainTupleTag,
+                         Map<TupleTag, Integer> tagToCopyNum) {
+    this.tagToCopyNum = checkNotNull(tagToCopyNum, "tagToCopyNum");
     this.description = checkNotNull(description, "description");
     this.mainOutputTag = mainTupleTag;
   }
@@ -46,7 +51,10 @@ class FlattenExecutor<InputT> implements Executor {
 
   @Override
   public void process(TupleTag tag, WindowedValue elem) {
-    executorsBolt.processExecutorElem(mainOutputTag, elem);
+    int copyNum = tagToCopyNum.get(tag);
+    for (int i = 0; i < copyNum; i++) {
+      executorsBolt.processExecutorElem(mainOutputTag, elem);
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/1178f9fb/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
index e104ad8..b96bc56 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -48,9 +48,19 @@ class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollecti
 
     // Since a new tag is created in PCollectionList, retrieve the real tag here.
     Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
+    Map<TupleTag<?>, Integer> tagToCopyNum = Maps.newHashMap();
     for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
       PCollection<V> pc = (PCollection<V>) entry.getValue();
-      inputs.putAll(pc.expand());
+      //inputs.putAll(pc.expand());
+      for (Map.Entry<TupleTag<?>, PValue> entry1 : pc.expand().entrySet()) {
+        if (inputs.containsKey(entry1.getKey())) {
+          int copyNum = tagToCopyNum.get(entry1.getKey());
+          tagToCopyNum.put(entry1.getKey(), ++copyNum);
+        } else {
+          inputs.put(entry1.getKey(), entry1.getValue());
+          tagToCopyNum.put(entry1.getKey(), 1);
+        }
+      }
     }
     String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
 
@@ -67,7 +77,8 @@ class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollecti
       context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
 
     } else {
-      FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
+      FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag(),
+          tagToCopyNum);
       context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
     }
   }


[37/53] [abbrv] beam git commit: jstorm-runner: support Flatten with empty inputs.

Posted by pe...@apache.org.
jstorm-runner: support Flatten with empty inputs.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/aca16cc9
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/aca16cc9
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/aca16cc9

Branch: refs/heads/jstorm-runner
Commit: aca16cc9b2224b9bfce98719c6ef2abbad94f7df
Parents: 4d634ec
Author: Pei He <pe...@apache.org>
Authored: Wed Jul 19 15:34:56 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:59 2017 +0800

----------------------------------------------------------------------
 .../jstorm/translation/FlattenTranslator.java   | 104 ++++++++++++++++++-
 1 file changed, 100 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/aca16cc9/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
index 89708df..8f239bf 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/FlattenTranslator.java
@@ -18,11 +18,24 @@
 package org.apache.beam.runners.jstorm.translation;
 
 import com.google.common.collect.Maps;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
+import java.util.NoSuchElementException;
+import javax.annotation.Nullable;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.VoidCoder;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TaggedPValue;
 import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
 
 /**
  * Translates a {@link Flatten} to a JStorm {@link FlattenExecutor}.
@@ -40,10 +53,93 @@ class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollecti
       PCollection<V> pc = (PCollection<V>) entry.getValue();
       inputs.putAll(pc.expand());
     }
-    System.out.println("Real inputs: " + inputs);
-    System.out.println("FlattenList inputs: " + userGraphContext.getInputs());
     String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
-    FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
-    context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+
+    if (inputs.isEmpty()) {
+      // Create a empty source
+      TupleTag<?> tag = userGraphContext.getOutputTag();
+      PValue output = userGraphContext.getOutput();
+
+      UnboundedSourceSpout spout = new UnboundedSourceSpout(
+          description,
+          new EmptySource(),
+          userGraphContext.getOptions(),
+          tag);
+      context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
+
+    } else {
+      FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
+      context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+    }
+  }
+
+  private static class EmptySource extends UnboundedSource<Void, UnboundedSource.CheckpointMark> {
+    @Override
+    public List<? extends UnboundedSource<Void, CheckpointMark>> split(
+        int i, PipelineOptions pipelineOptions) throws Exception {
+      return Collections.singletonList(this);
+    }
+
+    @Override
+    public UnboundedReader<Void> createReader(
+        PipelineOptions pipelineOptions,
+        @Nullable CheckpointMark checkpointMark) throws IOException {
+      return new EmptyReader();
+    }
+
+    @Override
+    public Coder<CheckpointMark> getCheckpointMarkCoder() {
+      return null;
+    }
+
+    @Override
+    public void validate() {
+    }
+
+    @Override
+    public Coder<Void> getDefaultOutputCoder() {
+      return VoidCoder.of();
+    }
+
+    private class EmptyReader extends UnboundedReader<Void> {
+      @Override
+      public boolean start() throws IOException {
+        return false;
+      }
+
+      @Override
+      public boolean advance() throws IOException {
+        return false;
+      }
+
+      @Override
+      public Void getCurrent() throws NoSuchElementException {
+        throw new NoSuchElementException();
+      }
+
+      @Override
+      public Instant getCurrentTimestamp() throws NoSuchElementException {
+        throw new NoSuchElementException();
+      }
+
+      @Override
+      public void close() throws IOException {
+      }
+
+      @Override
+      public Instant getWatermark() {
+        return BoundedWindow.TIMESTAMP_MAX_VALUE;
+      }
+
+      @Override
+      public CheckpointMark getCheckpointMark() {
+        return null;
+      }
+
+      @Override
+      public UnboundedSource<Void, ?> getCurrentSource() {
+        return EmptySource.this;
+      }
+    }
   }
 }


[23/53] [abbrv] beam git commit: jstorm-runner: move most classes to translation package and reduece their visibility to package private.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
new file mode 100644
index 0000000..dab9518
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceSpout.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.tuple.Values;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.beam.runners.jstorm.JStormPipelineOptions;
+import org.apache.beam.sdk.io.UnboundedSource;
+import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Spout implementation that wraps a Beam UnboundedSource.
+ * TODO: add wrapper to support metrics in UnboundedSource.
+ */
+public class UnboundedSourceSpout extends AbstractComponent implements IRichSpout {
+  private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
+
+  private final String description;
+  private final UnboundedSource source;
+  private final SerializedPipelineOptions serializedOptions;
+  private final TupleTag<?> outputTag;
+
+  private transient JStormPipelineOptions pipelineOptions;
+  private transient UnboundedSource.UnboundedReader reader;
+  private transient SpoutOutputCollector collector;
+
+  private volatile boolean hasNextRecord;
+  private AtomicBoolean activated = new AtomicBoolean();
+
+  private KryoSerializer<WindowedValue> serializer;
+
+  private long lastWaterMark = 0L;
+
+  public UnboundedSourceSpout(
+      String description,
+      UnboundedSource source,
+      JStormPipelineOptions options,
+      TupleTag<?> outputTag) {
+    this.description = checkNotNull(description, "description");
+    this.source = checkNotNull(source, "source");
+    this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
+    this.outputTag = checkNotNull(outputTag, "outputTag");
+  }
+
+  @Override
+  public synchronized void close() {
+    try {
+      activated.set(false);
+      this.reader.close();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void activate() {
+    activated.set(true);
+
+  }
+
+  @Override
+  public void deactivate() {
+    activated.set(false);
+  }
+
+  @Override
+  public void ack(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void fail(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return null;
+  }
+
+  @Override
+  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+    try {
+      this.collector = collector;
+      this.pipelineOptions =
+          this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
+
+      createSourceReader(null);
+
+      this.serializer = new KryoSerializer<>(conf);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to create unbounded reader.", e);
+    }
+  }
+
+  public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
+    if (reader != null) {
+      reader.close();
+    }
+    reader = this.source.createReader(this.pipelineOptions, checkpointMark);
+    hasNextRecord = this.reader.start();
+  }
+
+  @Override
+  public synchronized void nextTuple() {
+    if (!activated.get()) {
+      return;
+    }
+    try {
+      if (!hasNextRecord) {
+        hasNextRecord = reader.advance();
+      }
+
+      while (hasNextRecord && activated.get()) {
+        Object value = reader.getCurrent();
+        Instant timestamp = reader.getCurrentTimestamp();
+
+        WindowedValue wv =
+            WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
+        LOG.debug("Source output: " + wv.getValue());
+        if (keyedEmit(outputTag.getId())) {
+          KV kv = (KV) wv.getValue();
+          // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+          byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
+          collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
+        } else {
+          byte[] immutableValue = serializer.serialize(wv);
+          collector.emit(outputTag.getId(), new Values(immutableValue));
+        }
+
+        // move to next record
+        hasNextRecord = reader.advance();
+      }
+
+      Instant waterMark = reader.getWatermark();
+      if (waterMark != null && lastWaterMark < waterMark.getMillis()) {
+        lastWaterMark = waterMark.getMillis();
+        collector.flush();
+        collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
+        LOG.debug("Source output: WM-{}", waterMark.toDateTime());
+      }
+    } catch (IOException e) {
+      throw new RuntimeException("Exception reading values from source.", e);
+    }
+  }
+
+  public UnboundedSource getUnboundedSource() {
+    return source;
+  }
+
+  public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
+    return reader;
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
new file mode 100644
index 0000000..54c9b94
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/UnboundedSourceTranslator.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.sdk.io.Read;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TaggedPValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * Translates a Read.Unbounded into a Storm spout.
+ *
+ * @param <T>
+ */
+class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbounded<T>> {
+  public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+
+    TupleTag<?> tag = userGraphContext.getOutputTag();
+    PValue output = userGraphContext.getOutput();
+
+    UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        description,
+        transform.getSource(), userGraphContext.getOptions(), tag);
+    context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewExecutor.java
new file mode 100644
index 0000000..822ed8a
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewExecutor.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/**
+ * JStorm {@link Executor} for {@link View}.
+ */
+class ViewExecutor implements Executor {
+
+  private final String description;
+  private final TupleTag outputTag;
+  private ExecutorsBolt executorsBolt;
+
+  public ViewExecutor(String description, TupleTag outputTag) {
+    this.description = description;
+    this.outputTag = outputTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    executorsBolt.processExecutorElem(outputTag, elem);
+  }
+
+  @Override
+  public void cleanup() {
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
new file mode 100644
index 0000000..9ab5784
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ViewTranslator.java
@@ -0,0 +1,378 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.ListCoder;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PCollectionViews;
+
+/**
+ * A {@link TransformTranslator} for executing {@link View Views} in JStorm runner.
+ */
+class ViewTranslator
+    extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
+  @Override
+  public void translateNode(
+      CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    // Build a human-readable description from the transform and its input/output tags.
+    String description = describeTransform(
+        transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+    // A view is a pass-through at runtime: the executor simply re-emits elements
+    // under the transform's output tag.
+    ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
+    context.addTransformExecutor(viewExecutor);
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}: concatenates the
+   * input into a single list and wraps it in a map-shaped {@link PCollectionView}.
+   */
+  public static class ViewAsMap<K, V>
+      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
+
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsMap(View.AsMap<K, V> transform) {
+    }
+
+    @Override
+    public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
+      PCollectionView<Map<K, V>> view =
+          PCollectionViews.mapView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      @SuppressWarnings({"rawtypes", "unchecked"})
+      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+      try {
+        // Map views require deterministic key coders so keys can be compared reliably.
+        inputCoder.getKeyCoder().verifyDeterministic();
+      } catch (Coder.NonDeterministicException e) {
+        // TODO: log warning as other runners.
+      }
+
+      return input
+          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsMap";
+    }
+  }
+
+  /**
+   * Specialized expansion for {@link
+   * View.AsMultimap View.AsMultimap}: concatenates the input into a single list and
+   * wraps it in a multimap-shaped {@link PCollectionView}.
+   */
+  public static class ViewAsMultimap<K, V>
+      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
+
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsMultimap(View.AsMultimap<K, V> transform) {
+    }
+
+    @Override
+    public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
+      PCollectionView<Map<K, Iterable<V>>> view =
+          PCollectionViews.multimapView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      @SuppressWarnings({"rawtypes", "unchecked"})
+      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
+      try {
+        // Multimap views require deterministic key coders so keys can be grouped reliably.
+        inputCoder.getKeyCoder().verifyDeterministic();
+      } catch (Coder.NonDeterministicException e) {
+        // TODO: log warning as other runners.
+      }
+
+      return input
+          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsMultimap";
+    }
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link View.AsList View.AsList}: concatenates the input into a single list and
+   * wraps it in a list-shaped {@link PCollectionView}.
+   */
+  public static class ViewAsList<T>
+      extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsList(View.AsList<T> transform) {
+    }
+
+    @Override
+    public PCollectionView<List<T>> expand(PCollection<T> input) {
+      PCollectionView<List<T>> view =
+          PCollectionViews.listView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsList";
+    }
+  }
+
+  /**
+   * Specialized implementation for
+   * {@link View.AsIterable View.AsIterable} for the
+   * JStorm runner in streaming mode: concatenates the input into a single list and
+   * wraps it in an iterable-shaped {@link PCollectionView}.
+   */
+  public static class ViewAsIterable<T>
+      extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsIterable(View.AsIterable<T> transform) {
+    }
+
+    @Override
+    public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
+      PCollectionView<Iterable<T>> view =
+          PCollectionViews.iterableView(
+              input,
+              input.getWindowingStrategy(),
+              input.getCoder());
+
+      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
+          .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsIterable";
+    }
+  }
+
+  /**
+   * Specialized expansion for
+   * {@link View.AsSingleton View.AsSingleton} for the
+   * JStorm runner in streaming mode. Delegates to
+   * {@link Combine.Globally#asSingletonView()} with a combiner that rejects more than
+   * one element per window.
+   */
+  public static class ViewAsSingleton<T>
+      extends PTransform<PCollection<T>, PCollectionView<T>> {
+    private View.AsSingleton<T> transform;
+
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public ViewAsSingleton(View.AsSingleton<T> transform) {
+      this.transform = transform;
+    }
+
+    @Override
+    public PCollectionView<T> expand(PCollection<T> input) {
+      Combine.Globally<T, T> combine = Combine.globally(
+          new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
+      if (!transform.hasDefaultValue()) {
+        // Without a default, an empty window must fail instead of producing identity().
+        combine = combine.withoutDefaults();
+      }
+      return input.apply(combine.asSingletonView());
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingViewAsSingleton";
+    }
+
+    /**
+     * Binary combiner that never actually combines: two inputs means the PCollection
+     * was not a singleton, which is an error for a singleton view.
+     */
+    private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
+      private boolean hasDefaultValue;
+      private T defaultValue;
+
+      SingletonCombine(boolean hasDefaultValue, T defaultValue) {
+        this.hasDefaultValue = hasDefaultValue;
+        this.defaultValue = defaultValue;
+      }
+
+      @Override
+      public T apply(T left, T right) {
+        // Reaching here means at least two elements were present in the window.
+        throw new IllegalArgumentException("PCollection with more than one element "
+            + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
+            + "combine the PCollection into a single value");
+      }
+
+      @Override
+      public T identity() {
+        // Called for an empty window: return the configured default or fail.
+        if (hasDefaultValue) {
+          return defaultValue;
+        } else {
+          throw new IllegalArgumentException(
+              "Empty PCollection accessed as a singleton view. "
+                  + "Consider setting withDefault to provide a default value");
+        }
+      }
+    }
+  }
+
+  /**
+   * Specialized expansion for
+   * {@link org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView}.
+   *
+   * @param <InputT> element type of the input {@link PCollection}
+   * @param <OutputT> type of the combined value exposed by the singleton view
+   */
+  public static class CombineGloballyAsSingletonView<InputT, OutputT>
+      extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
+    Combine.GloballyAsSingletonView<InputT, OutputT> transform;
+
+    /**
+     * Builds an instance of this class from the overridden transform.
+     */
+    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
+    public CombineGloballyAsSingletonView(
+        Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
+      this.transform = transform;
+    }
+
+    @Override
+    public PCollectionView<OutputT> expand(PCollection<InputT> input) {
+      // Run the user's combine first, preserving its configured fanout.
+      PCollection<OutputT> combined =
+          input.apply(Combine.globally(transform.getCombineFn())
+              .withoutDefaults()
+              .withFanout(transform.getFanout()));
+
+      // The combiner's default value is used only when insertDefault is set.
+      PCollectionView<OutputT> view = PCollectionViews.singletonView(
+          combined,
+          combined.getWindowingStrategy(),
+          transform.getInsertDefault(),
+          transform.getInsertDefault()
+              ? transform.getCombineFn().defaultValue() : null,
+          combined.getCoder());
+      return combined
+          .apply(ParDo.of(new WrapAsList<OutputT>()))
+          .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
+    }
+
+    @Override
+    protected String getKindString() {
+      return "StreamingCombineGloballyAsSingletonView";
+    }
+  }
+
+  /** Wraps each element in a singleton {@link List}, as expected by
+   * {@link CreateJStormPCollectionView}'s {@code PCollection<List<ElemT>>} input. */
+  private static class WrapAsList<T> extends DoFn<T, List<T>> {
+    @ProcessElement
+    public void processElement(ProcessContext c) {
+      c.output(Collections.singletonList(c.element()));
+    }
+  }
+
+  /**
+   * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
+   * This requires that the input {@link PCollection} fits in memory.
+   * For a large {@link PCollection} this is expected to crash!
+   *
+   * @param <T> the type of elements to concatenate.
+   */
+  private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public List<T> createAccumulator() {
+      return new ArrayList<>();
+    }
+
+    @Override
+    public List<T> addInput(List<T> accumulator, T input) {
+      // Accumulators are mutated in place and returned, per the CombineFn contract.
+      accumulator.add(input);
+      return accumulator;
+    }
+
+    @Override
+    public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
+      List<T> result = createAccumulator();
+      for (List<T> accumulator : accumulators) {
+        result.addAll(accumulator);
+      }
+      return result;
+    }
+
+    @Override
+    public List<T> extractOutput(List<T> accumulator) {
+      // The accumulator already is the output list.
+      return accumulator;
+    }
+
+    @Override
+    public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
+      return ListCoder.of(inputCoder);
+    }
+
+    @Override
+    public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
+      return ListCoder.of(inputCoder);
+    }
+  }
+
+  /**
+   * Creates a primitive {@link PCollectionView}.
+   * For internal use only by runner implementors.
+   *
+   * @param <ElemT> The type of the elements of the input PCollection
+   * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
+   */
+  public static class CreateJStormPCollectionView<ElemT, ViewT>
+      extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
+    private PCollectionView<ViewT> view;
+
+    private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
+      this.view = view;
+    }
+
+    /** Static factory wrapping the given pre-built view. */
+    public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
+        PCollectionView<ViewT> view) {
+      return new CreateJStormPCollectionView<>(view);
+    }
+
+    @Override
+    public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
+      // The view was constructed eagerly by the caller; expansion just returns it.
+      return view;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
new file mode 100644
index 0000000..8d60392
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignExecutor.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.common.collect.Iterables;
+import java.util.Collection;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.WindowFn;
+import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.joda.time.Instant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.windowing.Window.Assign}:
+ * re-windows each element via the configured {@link WindowFn} and emits one copy per
+ * assigned window.
+ * @param <T> element type being windowed
+ * @param <W> window type produced by the {@link WindowFn}
+ */
+class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
+  private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
+
+  // Human-readable description of the translated transform; used only by toString().
+  private final String description;
+  private WindowFn<T, W> windowFn;
+  // Assigned in init(); routes emitted elements to downstream executors.
+  private ExecutorsBolt executorsBolt;
+  private TupleTag outputTag;
+
+  // NOTE(review): the type parameter W here shadows the outer class's W.
+  class JStormAssignContext<InputT, W extends BoundedWindow>
+      extends WindowFn<InputT, W>.AssignContext {
+    private final WindowedValue<InputT> value;
+
+    JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
+      fn.super();
+      // AssignContext exposes exactly one window; reject multi-window values up front.
+      checkArgument(
+          Iterables.size(value.getWindows()) == 1,
+          String.format(
+              "%s passed to window assignment must be in a single window, but it was in %s: %s",
+              WindowedValue.class.getSimpleName(),
+              Iterables.size(value.getWindows()),
+              value.getWindows()));
+      this.value = value;
+    }
+
+    @Override
+    public InputT element() {
+      return value.getValue();
+    }
+
+    @Override
+    public Instant timestamp() {
+      return value.getTimestamp();
+    }
+
+    @Override
+    public BoundedWindow window() {
+      return Iterables.getOnlyElement(value.getWindows());
+    }
+  }
+
+  public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
+    this.description = description;
+    this.windowFn = windowFn;
+    this.outputTag = outputTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    Collection<W> windows = null;
+    try {
+      windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
+      // Emit one copy of the element per assigned window, keeping timestamp and pane.
+      for (W window : windows) {
+        executorsBolt.processExecutorElem(
+            outputTag,
+            WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
+      }
+    } catch (Exception e) {
+      // NOTE(review): assignment failures are logged and the element is dropped
+      // rather than retried or failed — confirm this is the intended behavior.
+      LOG.warn("Failed to assign windows for elem=" + elem, e);
+    }
+  }
+
+  @Override
+  public void cleanup() {
+  }
+
+
+  @Override
+  public String toString() {
+    return description;
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignTranslator.java
new file mode 100644
index 0000000..86cb638
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/WindowAssignTranslator.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import org.apache.beam.sdk.transforms.windowing.Window;
+
+/**
+ * Translates a {@link org.apache.beam.sdk.transforms.windowing.Window.Assign} to a
+ * JStorm {@link WindowAssignExecutor}.
+ * @param <T> element type being windowed
+ */
+class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
+
+  @Override
+  public void translateNode(Window.Assign<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+    // Mark the user graph as windowed so downstream translation accounts for it.
+    context.getUserGraphContext().setWindowed();
+    WindowAssignExecutor executor = new WindowAssignExecutor(
+        description,
+        transform.getWindowFn(),
+        userGraphContext.getOutputTag());
+    context.addTransformExecutor(executor);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/package-info.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/package-info.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/package-info.java
new file mode 100644
index 0000000..f8f2f3f
--- /dev/null
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Implementation of the translation to JStorm topology.
+ */
+package org.apache.beam.runners.jstorm.translation;

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
deleted file mode 100644
index 3d7fab8..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/AbstractComponent.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.topology.IComponent;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-
-/**
- * Enable user to add output stream definitions by API, rather than hard-code.
- */
-public abstract class AbstractComponent implements IComponent {
-  private Map<String, Fields> streamToFields = new HashMap<>();
-  private Map<String, Boolean> keyStreams = new HashMap<>();
-  private int parallelismNum = 0;
-
-  public void addOutputField(String streamId) {
-    addOutputField(streamId, new Fields(CommonInstance.VALUE));
-  }
-
-  public void addOutputField(String streamId, Fields fields) {
-    streamToFields.put(streamId, fields);
-    keyStreams.put(streamId, false);
-  }
-
-  public void addKVOutputField(String streamId) {
-    streamToFields.put(streamId, new Fields(CommonInstance.KEY, CommonInstance.VALUE));
-    keyStreams.put(streamId, true);
-  }
-
-  @Override
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-    for (Map.Entry<String, Fields> entry : streamToFields.entrySet()) {
-      declarer.declareStream(entry.getKey(), entry.getValue());
-    }
-  }
-
-  public boolean keyedEmit(String streamId) {
-    Boolean isKeyedStream = keyStreams.get(streamId);
-    return isKeyedStream == null ? false : isKeyedStream;
-  }
-
-  public int getParallelismNum() {
-    return parallelismNum;
-  }
-
-  public void setParallelismNum(int num) {
-    parallelismNum = num;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
deleted file mode 100644
index e07d890..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnExecutor.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.metric.MetricClient;
-import com.google.common.collect.Iterables;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.runners.core.DoFnRunners;
-import org.apache.beam.runners.core.DoFnRunners.OutputManager;
-import org.apache.beam.runners.core.NullSideInputReader;
-import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
-import org.apache.beam.runners.core.SideInputHandler;
-import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.runners.core.StateTag;
-import org.apache.beam.runners.core.StateTags;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.jstorm.translation.util.DefaultStepContext;
-import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JStorm {@link Executor} for {@link DoFn}.
- * @param <InputT> input type
- * @param <OutputT> output type
- */
-public class DoFnExecutor<InputT, OutputT> implements Executor {
-  private static final long serialVersionUID = 5297603063991078668L;
-
-  private static final Logger LOG = LoggerFactory.getLogger(DoFnExecutor.class);
-
-  /**
-   * Implements {@link OutputManager} in a DoFn executor.
-   */
-  public class DoFnExecutorOutputManager implements OutputManager, Serializable {
-    private static final long serialVersionUID = -661113364735206170L;
-
-    @Override
-    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      executorsBolt.processExecutorElem(tag, output);
-    }
-  }
-
-  protected transient DoFnRunner<InputT, OutputT> runner = null;
-  protected transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackRunner = null;
-
-  protected final String stepName;
-
-  protected int internalDoFnExecutorId;
-
-  protected final String description;
-
-  protected final TupleTag<OutputT> mainTupleTag;
-  protected final List<TupleTag<?>> sideOutputTags;
-
-  protected SerializedPipelineOptions serializedOptions;
-  protected transient JStormPipelineOptions pipelineOptions;
-
-  protected DoFn<InputT, OutputT> doFn;
-  protected final Coder<WindowedValue<InputT>> inputCoder;
-  protected DoFnInvoker<InputT, OutputT> doFnInvoker;
-  protected OutputManager outputManager;
-  protected WindowingStrategy<?, ?> windowingStrategy;
-  protected final TupleTag<InputT> mainInputTag;
-  protected Collection<PCollectionView<?>> sideInputs;
-  protected SideInputHandler sideInputHandler;
-  protected final Map<TupleTag, PCollectionView<?>> sideInputTagToView;
-
-  // Initialize during runtime
-  protected ExecutorContext executorContext;
-  protected ExecutorsBolt executorsBolt;
-  protected TimerInternals timerInternals;
-  protected transient StateInternals pushbackStateInternals;
-  protected transient StateTag<BagState<WindowedValue<InputT>>> pushedBackTag;
-  protected transient StateTag<WatermarkHoldState> watermarkHoldTag;
-  protected transient IKvStoreManager kvStoreManager;
-  protected DefaultStepContext stepContext;
-  protected transient MetricClient metricClient;
-
-  public DoFnExecutor(
-      String stepName,
-      String description,
-      JStormPipelineOptions pipelineOptions,
-      DoFn<InputT, OutputT> doFn,
-      Coder<WindowedValue<InputT>> inputCoder,
-      WindowingStrategy<?, ?> windowingStrategy,
-      TupleTag<InputT> mainInputTag,
-      Collection<PCollectionView<?>> sideInputs,
-      Map<TupleTag, PCollectionView<?>> sideInputTagToView,
-      TupleTag<OutputT> mainTupleTag,
-      List<TupleTag<?>> sideOutputTags) {
-    this.stepName = checkNotNull(stepName, "stepName");
-    this.description = checkNotNull(description, "description");
-    this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
-    this.doFn = doFn;
-    this.inputCoder = inputCoder;
-    this.outputManager = new DoFnExecutorOutputManager();
-    this.windowingStrategy = windowingStrategy;
-    this.mainInputTag = mainInputTag;
-    this.sideInputs = sideInputs;
-    this.mainTupleTag = mainTupleTag;
-    this.sideOutputTags = sideOutputTags;
-    this.sideInputTagToView = sideInputTagToView;
-  }
-
-  protected DoFnRunner<InputT, OutputT> getDoFnRunner() {
-    return new DoFnRunnerWithMetrics<>(
-        stepName,
-        DoFnRunners.simpleRunner(
-            this.pipelineOptions,
-            this.doFn,
-            this.sideInputHandler == null ? NullSideInputReader.empty() : sideInputHandler,
-            this.outputManager,
-            this.mainTupleTag,
-            this.sideOutputTags,
-            this.stepContext,
-            this.windowingStrategy),
-        MetricsReporter.create(metricClient));
-  }
-
-  protected void initService(ExecutorContext context) {
-    // TODO: what should be set for key in here?
-    timerInternals = new JStormTimerInternals(
-        null /* key */, this, context.getExecutorsBolt().timerService());
-    kvStoreManager = context.getKvStoreManager();
-    stepContext = new DefaultStepContext(timerInternals,
-        new JStormStateInternals(
-            null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId));
-    metricClient = new MetricClient(executorContext.getTopologyContext());
-  }
-
-  @Override
-  public void init(ExecutorContext context) {
-    this.executorContext = context;
-    this.executorsBolt = context.getExecutorsBolt();
-    this.pipelineOptions =
-        this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
-
-    initService(context);
-
-    // Side inputs setup
-    if (sideInputs != null && !sideInputs.isEmpty()) {
-      pushedBackTag = StateTags.bag("pushed-back-values", inputCoder);
-      watermarkHoldTag =
-          StateTags.watermarkStateInternal("hold", TimestampCombiner.EARLIEST);
-      pushbackStateInternals = new JStormStateInternals(
-          null, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-      sideInputHandler = new SideInputHandler(sideInputs, pushbackStateInternals);
-      runner = getDoFnRunner();
-      pushbackRunner =
-          SimplePushbackSideInputDoFnRunner.create(runner, sideInputs, sideInputHandler);
-    } else {
-      runner = getDoFnRunner();
-    }
-
-    // Process user's setup
-    doFnInvoker = DoFnInvokers.invokerFor(doFn);
-    doFnInvoker.invokeSetup();
-  }
-
-  @Override
-  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-    LOG.debug(String.format("process: elemTag=%s, mainInputTag=%s, sideInputs=%s, elem={}",
-        tag, mainInputTag, sideInputs, elem.getValue()));
-    if (mainInputTag.equals(tag)) {
-      processMainInput(elem);
-    } else {
-      processSideInput(tag, elem);
-    }
-  }
-
-  protected <T> void processMainInput(WindowedValue<T> elem) {
-    if (sideInputs.isEmpty()) {
-      runner.processElement((WindowedValue<InputT>) elem);
-    } else {
-      Iterable<WindowedValue<InputT>> justPushedBack =
-          pushbackRunner.processElementInReadyWindows((WindowedValue<InputT>) elem);
-      BagState<WindowedValue<InputT>> pushedBack =
-          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-
-      Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-      for (WindowedValue<InputT> pushedBackValue : justPushedBack) {
-        if (pushedBackValue.getTimestamp().isBefore(min)) {
-          min = pushedBackValue.getTimestamp();
-        }
-        min = earlier(min, pushedBackValue.getTimestamp());
-        pushedBack.add(pushedBackValue);
-      }
-      pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag).add(min);
-    }
-  }
-
-  protected void processSideInput(TupleTag tag, WindowedValue elem) {
-    LOG.debug(String.format("side inputs: %s, %s.", tag, elem));
-
-    PCollectionView<?> sideInputView = sideInputTagToView.get(tag);
-    sideInputHandler.addSideInputValue(sideInputView, elem);
-
-    BagState<WindowedValue<InputT>> pushedBack =
-        pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-
-    List<WindowedValue<InputT>> newPushedBack = new ArrayList<>();
-
-    Iterable<WindowedValue<InputT>> pushedBackInputs = pushedBack.read();
-    if (pushedBackInputs != null) {
-      for (WindowedValue<InputT> input : pushedBackInputs) {
-
-        Iterable<WindowedValue<InputT>> justPushedBack =
-            pushbackRunner.processElementInReadyWindows(input);
-        Iterables.addAll(newPushedBack, justPushedBack);
-      }
-    }
-    pushedBack.clear();
-
-    Instant min = BoundedWindow.TIMESTAMP_MAX_VALUE;
-    for (WindowedValue<InputT> pushedBackValue : newPushedBack) {
-      min = earlier(min, pushedBackValue.getTimestamp());
-      pushedBack.add(pushedBackValue);
-    }
-
-    WatermarkHoldState watermarkHold =
-        pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-    // TODO: clear-then-add is not thread-safe.
-    watermarkHold.clear();
-    watermarkHold.add(min);
-  }
-
-  /**
-   * Process all pushed back elements when receiving watermark with max timestamp.
-   */
-  public void processAllPushBackElements() {
-    if (sideInputs != null && !sideInputs.isEmpty()) {
-      BagState<WindowedValue<InputT>> pushedBackElements =
-          pushbackStateInternals.state(StateNamespaces.global(), pushedBackTag);
-      if (pushedBackElements != null) {
-        for (WindowedValue<InputT> elem : pushedBackElements.read()) {
-          LOG.info("Process pushback elem={}", elem);
-          runner.processElement(elem);
-        }
-        pushedBackElements.clear();
-      }
-
-      WatermarkHoldState watermarkHold =
-          pushbackStateInternals.state(StateNamespaces.global(), watermarkHoldTag);
-      watermarkHold.clear();
-      watermarkHold.add(BoundedWindow.TIMESTAMP_MAX_VALUE);
-    }
-  }
-
-  public void onTimer(Object key, TimerInternals.TimerData timerData) {
-    StateNamespace namespace = timerData.getNamespace();
-    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
-    BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
-    if (pushbackRunner != null) {
-      pushbackRunner.onTimer(
-          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
-    } else {
-      runner.onTimer(
-          timerData.getTimerId(), window, timerData.getTimestamp(), timerData.getDomain());
-    }
-  }
-
-  @Override
-  public void cleanup() {
-    doFnInvoker.invokeTeardown();
-  }
-
-  @Override
-  public String toString() {
-    return description;
-  }
-
-  private Instant earlier(Instant left, Instant right) {
-    return left.isBefore(right) ? left : right;
-  }
-
-  public void startBundle() {
-    if (pushbackRunner != null) {
-      pushbackRunner.startBundle();
-    } else {
-      runner.startBundle();
-    }
-  }
-
-  public void finishBundle() {
-    if (pushbackRunner != null) {
-      pushbackRunner.finishBundle();
-    } else {
-      runner.finishBundle();
-    }
-  }
-
-  public void setInternalDoFnExecutorId(int id) {
-    this.internalDoFnExecutorId = id;
-  }
-
-  public int getInternalDoFnExecutorId() {
-    return internalDoFnExecutorId;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
deleted file mode 100644
index 1610a8a..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/DoFnRunnerWithMetrics.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import java.io.Closeable;
-import java.io.IOException;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.sdk.metrics.MetricsContainer;
-import org.apache.beam.sdk.metrics.MetricsEnvironment;
-import org.apache.beam.sdk.state.TimeDomain;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.joda.time.Instant;
-
-/**
- * DoFnRunner decorator which registers {@link MetricsContainer}.
- */
-public class DoFnRunnerWithMetrics<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
-
-  private final String stepName;
-  private final DoFnRunner<InputT, OutputT> delegate;
-  private final MetricsReporter metricsReporter;
-
-  DoFnRunnerWithMetrics(
-      String stepName,
-      DoFnRunner<InputT, OutputT> delegate,
-      MetricsReporter metricsReporter) {
-    this.stepName = checkNotNull(stepName, "stepName");
-    this.delegate = checkNotNull(delegate, "delegate");
-    this.metricsReporter = checkNotNull(metricsReporter, "metricsReporter");
-  }
-
-  @Override
-  public void startBundle() {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.startBundle();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void processElement(WindowedValue<InputT> elem) {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.processElement(elem);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void onTimer(
-      String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.onTimer(timerId, window, timestamp, timeDomain);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void finishBundle() {
-    try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(
-        metricsReporter.getMetricsContainer(stepName))) {
-      delegate.finishBundle();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    metricsReporter.updateMetrics();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
deleted file mode 100644
index 0ec4fdd..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/Executor.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * An executor is a basic executable unit in a JStorm task.
- */
-public interface Executor extends Serializable {
-  /**
-   * Initialization during runtime.
-   */
-  void init(ExecutorContext context);
-
-  <T> void process(TupleTag<T> tag, WindowedValue<T> elem);
-
-  void cleanup();
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
deleted file mode 100644
index 55ca171..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorContext.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import backtype.storm.task.TopologyContext;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.google.auto.value.AutoValue;
-
-/**
- * Context of a executors bolt when runtime.
- */
-@AutoValue
-public abstract class ExecutorContext {
-  public static ExecutorContext of(
-      TopologyContext topologyContext,
-      ExecutorsBolt bolt,
-      IKvStoreManager kvStoreManager) {
-    return new AutoValue_ExecutorContext(topologyContext, bolt, kvStoreManager);
-  }
-
-  public abstract TopologyContext getTopologyContext();
-
-  public abstract ExecutorsBolt getExecutorsBolt();
-
-  public abstract IKvStoreManager getKvStoreManager();
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
deleted file mode 100644
index 0366c13..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ExecutorsBolt.java
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBatchBolt;
-import backtype.storm.tuple.ITupleExt;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.KvStoreManagerFactory;
-import com.alibaba.jstorm.cluster.Common;
-import com.alibaba.jstorm.utils.KryoSerializer;
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * ExecutorsBolt is a JStorm Bolt composited with several executors chained in a sub-DAG.
- */
-public class ExecutorsBolt extends AbstractComponent implements IRichBatchBolt {
-  private static final long serialVersionUID = -7751043327801735211L;
-
-  private static final Logger LOG = LoggerFactory.getLogger(ExecutorsBolt.class);
-
-  protected ExecutorContext executorContext;
-
-  protected TimerService timerService;
-
-  // map from input tag to executor inside bolt
-  protected final Map<TupleTag, Executor> inputTagToExecutor = Maps.newHashMap();
-  // set of all output tags that will be emit outside bolt
-  protected final Set<TupleTag> outputTags = Sets.newHashSet();
-  protected final Set<TupleTag> externalOutputTags = Sets.newHashSet();
-  protected final Set<DoFnExecutor> doFnExecutors = Sets.newHashSet();
-  protected int internalDoFnExecutorId = 1;
-  protected final Map<Integer, DoFnExecutor> idToDoFnExecutor = Maps.newHashMap();
-
-  protected OutputCollector collector;
-
-  protected boolean isStatefulBolt = false;
-
-  protected KryoSerializer<WindowedValue> serializer;
-
-  public ExecutorsBolt() {
-
-  }
-
-  public void setStatefulBolt(boolean isStateful) {
-    isStatefulBolt = isStateful;
-  }
-
-  public void addExecutor(TupleTag inputTag, Executor executor) {
-    inputTagToExecutor.put(
-        checkNotNull(inputTag, "inputTag"),
-        checkNotNull(executor, "executor"));
-  }
-
-  public Map<TupleTag, Executor> getExecutors() {
-    return inputTagToExecutor;
-  }
-
-  public void registerExecutor(Executor executor) {
-    if (executor instanceof DoFnExecutor) {
-      DoFnExecutor doFnExecutor = (DoFnExecutor) executor;
-      idToDoFnExecutor.put(internalDoFnExecutorId, doFnExecutor);
-      doFnExecutor.setInternalDoFnExecutorId(internalDoFnExecutorId);
-      internalDoFnExecutorId++;
-    }
-  }
-
-  public Map<Integer, DoFnExecutor> getIdToDoFnExecutor() {
-    return idToDoFnExecutor;
-  }
-
-  public void addOutputTags(TupleTag tag) {
-    outputTags.add(tag);
-  }
-
-  public void addExternalOutputTag(TupleTag<?> tag) {
-    externalOutputTags.add(tag);
-  }
-
-  public Set<TupleTag> getOutputTags() {
-    return outputTags;
-  }
-
-  public ExecutorContext getExecutorContext() {
-    return executorContext;
-  }
-
-  @Override
-  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-    LOG.info("Start to prepare for task-{}", context.getThisTaskId());
-    try {
-      this.collector = collector;
-
-      // init kv store manager
-      String storeName = String.format("task-%d", context.getThisTaskId());
-      String stateStorePath = String.format("%s/beam/%s", context.getWorkerIdDir(), storeName);
-      IKvStoreManager kvStoreManager = isStatefulBolt
-              ? KvStoreManagerFactory.getKvStoreManagerWithMonitor(
-              context, storeName, stateStorePath, isStatefulBolt)
-              : KvStoreManagerFactory.getKvStoreManager(
-              stormConf, storeName, stateStorePath, isStatefulBolt);
-      this.executorContext = ExecutorContext.of(context, this, kvStoreManager);
-
-      // init time service
-      timerService = initTimerService();
-
-      // init all internal executors
-      for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-        executor.init(executorContext);
-        if (executor instanceof DoFnExecutor) {
-          doFnExecutors.add((DoFnExecutor) executor);
-        }
-      }
-
-      this.serializer = new KryoSerializer<WindowedValue>(stormConf);
-
-      LOG.info("ExecutorsBolt finished init. LocalExecutors={}", inputTagToExecutor.values());
-      LOG.info("inputTagToExecutor={}", inputTagToExecutor);
-      LOG.info("outputTags={}", outputTags);
-      LOG.info("externalOutputTags={}", externalOutputTags);
-      LOG.info("doFnExecutors={}", doFnExecutors);
-    } catch (IOException e) {
-      throw new RuntimeException("Failed to prepare executors bolt", e);
-    }
-  }
-
-  public TimerService initTimerService() {
-    TopologyContext context = executorContext.getTopologyContext();
-    List<Integer> tasks = FluentIterable.from(context.getThisSourceComponentTasks().entrySet())
-        .transformAndConcat(
-            new Function<Map.Entry<String, List<Integer>>, Iterable<Integer>>() {
-              @Override
-              public Iterable<Integer> apply(Map.Entry<String, List<Integer>> value) {
-                if (Common.isSystemComponent(value.getKey())) {
-                  return Collections.EMPTY_LIST;
-                } else {
-                  return value.getValue();
-                }
-              }
-            })
-        .toList();
-    TimerService ret = new TimerServiceImpl(executorContext);
-    ret.init(tasks);
-    return ret;
-  }
-
-  @Override
-  public void execute(Tuple input) {
-    // process a batch
-    String streamId = input.getSourceStreamId();
-    ITupleExt tuple = (ITupleExt) input;
-    Iterator<List<Object>> valueIterator = tuple.batchValues().iterator();
-    if (CommonInstance.BEAM_WATERMARK_STREAM_ID.equals(streamId)) {
-      while (valueIterator.hasNext()) {
-        processWatermark((Long) valueIterator.next().get(0), input.getSourceTask());
-      }
-    } else {
-      doFnStartBundle();
-      while (valueIterator.hasNext()) {
-        processElement(valueIterator.next(), streamId);
-      }
-      doFnFinishBundle();
-    }
-  }
-
-  private void processWatermark(long watermarkTs, int sourceTask) {
-    long newWaterMark = timerService.updateInputWatermark(sourceTask, watermarkTs);
-    LOG.debug("Recv waterMark-{} from task-{}, newWaterMark={}",
-        (new Instant(watermarkTs)).toDateTime(),
-        sourceTask,
-        (new Instant(newWaterMark)).toDateTime());
-    if (newWaterMark != 0) {
-      // Some buffer windows are going to be triggered.
-      doFnStartBundle();
-      timerService.fireTimers(newWaterMark);
-
-      // SideInput: If receiving water mark with max timestamp, It means no more data is supposed
-      // to be received from now on. So we are going to process all push back data.
-      if (newWaterMark == BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) {
-        for (DoFnExecutor doFnExecutor : doFnExecutors) {
-          doFnExecutor.processAllPushBackElements();
-        }
-      }
-
-      doFnFinishBundle();
-    }
-
-    long currentWaterMark = timerService.currentOutputWatermark();
-    if (!externalOutputTags.isEmpty()) {
-      collector.flush();
-      collector.emit(
-          CommonInstance.BEAM_WATERMARK_STREAM_ID,
-          new Values(currentWaterMark));
-      LOG.debug("Send waterMark-{}", (new Instant(currentWaterMark)).toDateTime());
-    }
-  }
-
-  private void processElement(List<Object> values, String streamId) {
-    TupleTag inputTag = new TupleTag(streamId);
-    WindowedValue windowedValue = retrieveWindowedValueFromTupleValue(values);
-    processExecutorElem(inputTag, windowedValue);
-  }
-
-  public <T> void processExecutorElem(TupleTag<T> inputTag, WindowedValue<T> elem) {
-    LOG.debug("ProcessExecutorElem: inputTag={}, value={}", inputTag, elem.getValue());
-    if (elem != null) {
-      Executor executor = inputTagToExecutor.get(inputTag);
-      if (executor != null) {
-        executor.process(inputTag, elem);
-      }
-      if (externalOutputTags.contains(inputTag)) {
-        emitOutsideBolt(inputTag, elem);
-      }
-    } else {
-      LOG.info("Received null elem for tag={}", inputTag);
-    }
-  }
-
-  @Override
-  public void cleanup() {
-    for (Executor executor : Sets.newHashSet(inputTagToExecutor.values())) {
-      executor.cleanup();
-    }
-    executorContext.getKvStoreManager().close();
-  }
-
-  @Override
-  public Map<String, Object> getComponentConfiguration() {
-    return null;
-  }
-
-  public TimerService timerService() {
-    return timerService;
-  }
-
-  public void setTimerService(TimerService service) {
-    timerService = service;
-  }
-
-  private WindowedValue retrieveWindowedValueFromTupleValue(List<Object> values) {
-    WindowedValue wv = null;
-    if (values.size() > 1) {
-      Object key = values.get(0);
-      WindowedValue value = serializer.deserialize((byte[]) values.get(1));
-      wv = value.withValue(KV.of(key, value.getValue()));
-    } else {
-      wv = serializer.deserialize((byte[]) values.get(0));
-    }
-    return wv;
-  }
-
-  protected void emitOutsideBolt(TupleTag outputTag, WindowedValue outputValue) {
-    LOG.debug("Output outside: tag={}, value={}", outputTag, outputValue.getValue());
-    if (keyedEmit(outputTag.getId())) {
-      KV kv = (KV) outputValue.getValue();
-      byte[] immutableOutputValue = serializer.serialize(outputValue.withValue(kv.getValue()));
-      // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-      if (kv.getKey() == null) {
-        // If key is null, emit "null" string here. Because, null value will be ignored in JStorm.
-        collector.emit(outputTag.getId(), new Values("null", immutableOutputValue));
-      } else {
-        collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableOutputValue));
-      }
-    } else {
-      byte[] immutableOutputValue = serializer.serialize(outputValue);
-      collector.emit(outputTag.getId(), new Values(immutableOutputValue));
-    }
-  }
-
-  private void doFnStartBundle() {
-    for (DoFnExecutor doFnExecutor : doFnExecutors) {
-      doFnExecutor.startBundle();
-    }
-  }
-
-  private void doFnFinishBundle() {
-    for (DoFnExecutor doFnExecutor : doFnExecutors) {
-      doFnExecutor.finishBundle();
-    }
-  }
-
-  @Override
-  public String toString() {
-    // LOG.info("bolt: " + executorContext.getTopologyContext().toJSONString());
-    List<String> ret = new ArrayList<>();
-        /*ret.add("inputTags");
-        for (TupleTag inputTag : inputTagToExecutor.keySet()) {
-            ret.add(inputTag.getId());
-        }*/
-    ret.add("internalExecutors");
-    for (Executor executor : inputTagToExecutor.values()) {
-      ret.add(executor.toString());
-    }
-    ret.add("externalOutputTags");
-    for (TupleTag output : externalOutputTags) {
-      ret.add(output.getId());
-    }
-    return Joiner.on('\n').join(ret).concat("\n");
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
deleted file mode 100644
index caf1e47..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/FlattenExecutor.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.Flatten}.
- * @param <InputT>
- */
-public class FlattenExecutor<InputT> implements Executor {
-
-  private final String description;
-  private TupleTag mainOutputTag;
-  private ExecutorContext context;
-  private ExecutorsBolt executorsBolt;
-
-  public FlattenExecutor(String description, TupleTag mainTupleTag) {
-    this.description = checkNotNull(description, "description");
-    this.mainOutputTag = mainTupleTag;
-  }
-
-  @Override
-  public void init(ExecutorContext context) {
-    this.context = context;
-    this.executorsBolt = context.getExecutorsBolt();
-  }
-
-  @Override
-  public void process(TupleTag tag, WindowedValue elem) {
-    executorsBolt.processExecutorElem(mainOutputTag, elem);
-  }
-
-  @Override
-  public void cleanup() {
-  }
-
-  @Override
-  public String toString() {
-    return description;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
deleted file mode 100644
index 0dd1af9..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/GroupByWindowExecutor.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-import com.google.common.collect.ImmutableList;
-import java.io.Serializable;
-import java.util.List;
-import org.apache.beam.runners.core.DoFnRunner;
-import org.apache.beam.runners.core.DoFnRunners;
-import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetNewDoFn;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.KeyedWorkItems;
-import org.apache.beam.runners.core.NullSideInputReader;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.StateInternalsFactory;
-import org.apache.beam.runners.core.StateNamespace;
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.runners.core.SystemReduceFn;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.TimerInternalsFactory;
-import org.apache.beam.runners.jstorm.JStormPipelineOptions;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.TranslationContext.UserGraphContext;
-import org.apache.beam.runners.jstorm.translation.runtime.state.JStormStateInternals;
-import org.apache.beam.runners.jstorm.translation.runtime.timer.JStormTimerInternals;
-import org.apache.beam.runners.jstorm.util.RunnerUtils;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.WindowingStrategy;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JStorm {@link Executor} for {@link org.apache.beam.sdk.transforms.GroupByKey}.
- * @param <K>
- * @param <V>
- */
-public class GroupByWindowExecutor<K, V>
-    extends DoFnExecutor<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> {
-  private static final long serialVersionUID = -7563050475488610553L;
-
-  private static final Logger LOG = LoggerFactory.getLogger(GroupByWindowExecutor.class);
-
-  private class GroupByWindowOutputManager implements DoFnRunners.OutputManager, Serializable {
-
-    @Override
-    public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      executorsBolt.processExecutorElem(tag, output);
-    }
-  }
-
-  private KvCoder<K, V> inputKvCoder;
-  private SystemReduceFn<K, V, Iterable<V>, Iterable<V>, BoundedWindow> reduceFn;
-
-  public GroupByWindowExecutor(
-      String stepName,
-      String description,
-      TranslationContext context,
-      JStormPipelineOptions pipelineOptions,
-      WindowingStrategy<?, ?> windowingStrategy,
-      TupleTag<KV<K, Iterable<V>>> mainTupleTag, List<TupleTag<?>> sideOutputTags) {
-    // The doFn will be created when runtime. Just pass "null" here
-    super(
-        stepName,
-        description,
-        pipelineOptions,
-        null,
-        null,
-        windowingStrategy,
-        null,
-        null,
-        null,
-        mainTupleTag,
-        sideOutputTags);
-
-    this.outputManager = new GroupByWindowOutputManager();
-    UserGraphContext userGraphContext = context.getUserGraphContext();
-    PCollection<KV<K, V>> input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-    this.inputKvCoder = (KvCoder<K, V>) input.getCoder();
-  }
-
-  private DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getGroupByWindowDoFn() {
-    final StateInternalsFactory<K> stateFactory = new StateInternalsFactory<K>() {
-      @Override
-      public StateInternals stateInternalsForKey(K key) {
-        return new JStormStateInternals<K>(
-            key, kvStoreManager, executorsBolt.timerService(), internalDoFnExecutorId);
-      }
-    };
-    TimerInternalsFactory<K> timerFactory = new TimerInternalsFactory<K>() {
-      @Override
-      public TimerInternals timerInternalsForKey(K key) {
-        return new JStormTimerInternals<>(
-            key,
-            GroupByWindowExecutor.this,
-            executorContext.getExecutorsBolt().timerService());
-      }
-    };
-
-    reduceFn = SystemReduceFn.buffering(inputKvCoder.getValueCoder());
-    DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFn =
-        GroupAlsoByWindowViaWindowSetNewDoFn.create(
-            windowingStrategy, stateFactory, timerFactory, NullSideInputReader.empty(),
-            (SystemReduceFn) reduceFn, outputManager, mainTupleTag);
-    return doFn;
-  }
-
-  @Override
-  protected DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> getDoFnRunner() {
-    doFn = getGroupByWindowDoFn();
-
-    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> simpleRunner = DoFnRunners.simpleRunner(
-        this.pipelineOptions,
-        this.doFn,
-        NullSideInputReader.empty(),
-        this.outputManager,
-        this.mainTupleTag,
-        this.sideOutputTags,
-        this.stepContext,
-        this.windowingStrategy);
-
-    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> doFnRunner =
-        DoFnRunners.lateDataDroppingRunner(
-            simpleRunner,
-            this.stepContext,
-            this.windowingStrategy);
-    return new DoFnRunnerWithMetrics<>(
-        stepName, doFnRunner, MetricsReporter.create(metricClient));
-  }
-
-  @Override
-  public void process(TupleTag tag, WindowedValue elem) {
-    /**
-     *  For GroupByKey, KV type elem is received. We need to convert the KV elem
-     *  into KeyedWorkItem first, which is the expected type in LateDataDroppingDoFnRunner.
-     */
-    KeyedWorkItem<K, V> keyedWorkItem = RunnerUtils.toKeyedWorkItem((WindowedValue<KV<K, V>>) elem);
-    runner.processElement(elem.withValue(keyedWorkItem));
-  }
-
-  @Override
-  public void onTimer(Object key, TimerInternals.TimerData timerData) {
-    StateNamespace namespace = timerData.getNamespace();
-    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
-
-    runner.processElement(
-        WindowedValue.valueInGlobalWindow(
-            KeyedWorkItems.<K, V>timersWorkItem((K) key, ImmutableList.of(timerData))));
-  }
-
-  @Override
-  public String toString() {
-    return super.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
deleted file mode 100644
index a022440..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/MetricsReporter.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static org.apache.beam.runners.core.metrics.MetricsContainerStepMap.asAttemptedOnlyMetricResults;
-
-import com.alibaba.jstorm.common.metric.AsmCounter;
-import com.alibaba.jstorm.metric.MetricClient;
-import com.google.common.collect.Maps;
-import java.util.Map;
-import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
-import org.apache.beam.sdk.metrics.MetricQueryResults;
-import org.apache.beam.sdk.metrics.MetricResult;
-import org.apache.beam.sdk.metrics.MetricResults;
-import org.apache.beam.sdk.metrics.MetricsContainer;
-import org.apache.beam.sdk.metrics.MetricsFilter;
-
-/**
- * Class that holds a {@link MetricsContainerStepMap}, and reports metrics to JStorm engine.
- */
-public class MetricsReporter {
-
-  private static final String METRIC_KEY_SEPARATOR = "__";
-  private static final String COUNTER_PREFIX = "__counter";
-
-  private final MetricsContainerStepMap metricsContainers = new MetricsContainerStepMap();
-  private final Map<String, Long> reportedCounters = Maps.newHashMap();
-  private final MetricClient metricClient;
-
-  public static MetricsReporter create(MetricClient metricClient) {
-    return new MetricsReporter(metricClient);
-  }
-
-  private MetricsReporter(MetricClient metricClient) {
-    this.metricClient = checkNotNull(metricClient, "metricClient");
-  }
-
-  public MetricsContainer getMetricsContainer(String stepName) {
-    return metricsContainers.getContainer(stepName);
-  }
-
-  public void updateMetrics() {
-    MetricResults metricResults = asAttemptedOnlyMetricResults(metricsContainers);
-    MetricQueryResults metricQueryResults =
-        metricResults.queryMetrics(MetricsFilter.builder().build());
-    updateCounters(metricQueryResults.counters());
-  }
-
-  private void updateCounters(Iterable<MetricResult<Long>> counters) {
-    System.out.print("updateCounters");
-    for (MetricResult<Long> metricResult : counters) {
-      String metricName = getMetricNameString(COUNTER_PREFIX, metricResult);
-      System.out.print("metricName: " + metricName);
-      Long updateValue = metricResult.attempted();
-      Long oldValue = reportedCounters.get(metricName);
-
-      if (oldValue == null || oldValue < updateValue) {
-        AsmCounter counter = metricClient.registerCounter(metricName);
-        Long incValue = (oldValue == null ? updateValue : updateValue - oldValue);
-        counter.update(incValue);
-      }
-    }
-  }
-
-  private String getMetricNameString(String prefix, MetricResult<?> metricResult) {
-    return prefix
-        + METRIC_KEY_SEPARATOR + metricResult.step()
-        + METRIC_KEY_SEPARATOR + metricResult.name().namespace()
-        + METRIC_KEY_SEPARATOR + metricResult.name().name();
-  }
-}


[28/53] [abbrv] beam git commit: jstorm-runner: 1. Use the TupleTag of "PCollection expand" when getting input tags and output tags 2. Check the exception record when asserting of unit test

Posted by pe...@apache.org.
jstorm-runner:
1. Use the TupleTag of "PCollection expand" when getting input tags and output tags
2. Check the exception record when asserting of unit test


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/30f3eda6
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/30f3eda6
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/30f3eda6

Branch: refs/heads/jstorm-runner
Commit: 30f3eda64c68cea092c42b7acc1dfd98eb8cbbd0
Parents: df154de
Author: basti.lj <ba...@alibaba-inc.com>
Authored: Mon Jul 17 15:55:01 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:58 2017 +0800

----------------------------------------------------------------------
 .../beam/runners/jstorm/TestJStormRunner.java       |  9 +++++++++
 .../runners/jstorm/translation/DoFnExecutor.java    |  6 +++++-
 .../jstorm/translation/MultiOutputDoFnExecutor.java |  6 +-----
 .../jstorm/translation/ParDoBoundTranslator.java    |  5 +++--
 .../jstorm/translation/TranslationContext.java      | 16 +++++++++-------
 5 files changed, 27 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/30f3eda6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
index b1b0379..a117675 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/TestJStormRunner.java
@@ -8,6 +8,7 @@ import com.alibaba.jstorm.metric.AsmWindow;
 import com.alibaba.jstorm.metric.JStormMetrics;
 import com.alibaba.jstorm.metric.MetaType;
 import com.alibaba.jstorm.metric.MetricType;
+import com.alibaba.jstorm.task.error.TaskReportErrorAndDie;
 import com.alibaba.jstorm.utils.JStormUtils;
 import com.google.common.base.Optional;
 import com.google.common.collect.Maps;
@@ -56,14 +57,21 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
       if (numberOfAssertions == 0) {
         // If assert number is zero, wait 5 sec
         JStormUtils.sleepMs(5000);
+        Exception taskExceptionRec = TaskReportErrorAndDie.getExceptionRecord();
+        if (taskExceptionRec != null) {
+          throw new RuntimeException(taskExceptionRec.getCause());
+        }
         return result;
       } else {
         for (int i = 0; i < 40; ++i) {
           Optional<Boolean> success = checkForPAssertSuccess(numberOfAssertions);
+          Exception taskExceptionRec = TaskReportErrorAndDie.getExceptionRecord();
           if (success.isPresent() && success.get()) {
             return result;
           } else if (success.isPresent() && !success.get()) {
             throw new AssertionError("Failed assertion checks.");
+          } else if (taskExceptionRec != null) {
+            throw new RuntimeException(taskExceptionRec.getCause());
           } else {
             JStormUtils.sleepMs(500);
           }
@@ -74,6 +82,7 @@ public class TestJStormRunner extends PipelineRunner<JStormRunnerResult> {
     } finally {
       clearPAssertCount();
       cancel(result);
+      TaskReportErrorAndDie.setExceptionRecord(null);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/30f3eda6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
index fdd9af6..6baa944 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/DoFnExecutor.java
@@ -203,8 +203,12 @@ class DoFnExecutor<InputT, OutputT> implements Executor {
         tag, mainInputTag, sideInputs, elem.getValue()));
     if (mainInputTag.equals(tag)) {
       processMainInput(elem);
-    } else {
+    } else if (sideInputTagToView.containsKey(tag)) {
       processSideInput(tag, elem);
+    } else {
+      LOG.warn("Discard unexpected elem={} from tag={}", elem.getValue(), tag);
+      LOG.warn("Current mainInputTag={}, sideInputTags={}",
+              mainInputTag, sideInputTagToView.keySet());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/30f3eda6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
index 49b0f85..138a5dc 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/MultiOutputDoFnExecutor.java
@@ -46,11 +46,7 @@ class MultiOutputDoFnExecutor<InputT, OutputT> extends DoFnExecutor<InputT, Outp
   public class MultiOutputDoFnExecutorOutputManager extends DoFnExecutorOutputManager {
     @Override
     public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
-      if (localTupleTagMap.containsKey(tag)) {
-        executorsBolt.processExecutorElem((TupleTag<T>) localTupleTagMap.get(tag), output);
-      } else {
-        executorsBolt.processExecutorElem(tag, output);
-      }
+      executorsBolt.processExecutorElem(tag, output);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/30f3eda6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
index 6feb7f8..e6d09c4 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/ParDoBoundTranslator.java
@@ -20,8 +20,10 @@ package org.apache.beam.runners.jstorm.translation;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import java.util.List;
 import java.util.Map;
+
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
@@ -54,8 +56,7 @@ class ParDoBoundTranslator<InputT, OutputT>
     TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
     List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
 
-    Map<TupleTag<?>, PValue> allInputs =
-        avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
+    Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
     for (PCollectionView pCollectionView : transform.getSideInputs()) {
       allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/30f3eda6/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
index e25f211..101921f 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslationContext.java
@@ -36,7 +36,6 @@ import java.util.Map;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.PValueBase;
 import org.apache.beam.sdk.values.TaggedPValue;
 import org.apache.beam.sdk.values.TupleTag;
 import org.slf4j.Logger;
@@ -184,10 +183,6 @@ public class TranslationContext {
       TupleTag tag = entry.getKey();
       PValue value = entry.getValue();
 
-      // use tag of PValueBase
-      if (value instanceof PValueBase) {
-        tag = ((PValueBase) value).expand().keySet().iterator().next();
-      }
       executionGraphContext.registerStreamProducer(
           TaggedPValue.of(tag, value),
           Stream.Producer.of(name, tag.getId(), value.getName()));
@@ -198,6 +193,9 @@ public class TranslationContext {
     for (Map.Entry<TupleTag<?>, PValue> entry : inputs.entrySet()) {
       TupleTag tag = entry.getKey();
       PValue value = entry.getValue();
+      if (userGraphContext.findTupleTag(value) != null) {
+        tag = userGraphContext.findTupleTag(value);
+      }
       bolt.addExecutor(tag, executor);
 
       // filter all connections inside bolt
@@ -269,11 +267,15 @@ public class TranslationContext {
     }
 
     public TupleTag<?> getInputTag() {
-      return currentTransform.getInputs().keySet().iterator().next();
+      return pValueToTupleTag.get(this.getInput());
     }
 
     public List<TupleTag<?>> getInputTags() {
-      return Lists.newArrayList(currentTransform.getInputs().keySet());
+      List inputTags = Lists.newArrayList();
+      for (PValue value : currentTransform.getInputs().values()) {
+        inputTags.add(pValueToTupleTag.get(value));
+      }
+      return inputTags;
     }
 
     public <T extends PValue> T getOutput() {


[21/53] [abbrv] beam git commit: jstorm-runner: move most classes to translation package and reduece their visibility to package private.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
deleted file mode 100644
index 6e3392c..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundMultiTranslator.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.MultiOutputDoFnExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.PValueBase;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Translates a ParDo.BoundMulti to a Storm {@link DoFnExecutor}.
- */
-public class ParDoBoundMultiTranslator<InputT, OutputT>
-    extends TransformTranslator.Default<ParDo.MultiOutput<InputT, OutputT>> {
-
-  @Override
-  public void translateNode(
-      ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
-    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    final TupleTag<InputT> inputTag = (TupleTag<InputT>) userGraphContext.getInputTag();
-    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
-
-    Map<TupleTag<?>, PValue> allOutputs = Maps.newHashMap(userGraphContext.getOutputs());
-    Map<TupleTag<?>, TupleTag<?>> localToExternalTupleTagMap = Maps.newHashMap();
-    for (Map.Entry<TupleTag<?>, PValue> entry : allOutputs.entrySet()) {
-      Iterator<TupleTag<?>> itr = ((PValueBase) entry.getValue()).expand().keySet().iterator();
-      localToExternalTupleTagMap.put(entry.getKey(), itr.next());
-    }
-
-    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-    List<TupleTag<?>> sideOutputTags = userGraphContext.getOutputTags();
-    sideOutputTags.remove(mainOutputTag);
-
-    Map<TupleTag<?>, PValue> allInputs = Maps.newHashMap(userGraphContext.getInputs());
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-    String description = describeTransform(
-        transform,
-        allInputs,
-        allOutputs);
-
-    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-
-    DoFnExecutor executor;
-    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-    if (signature.stateDeclarations().size() > 0
-        || signature.timerDeclarations().size() > 0) {
-      executor = new MultiStatefulDoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          (DoFn<KV, OutputT>) transform.getFn(),
-          (Coder) WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          (TupleTag<KV>) inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags,
-          localToExternalTupleTagMap);
-    } else {
-      executor = new MultiOutputDoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          transform.getFn(),
-          WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags,
-          localToExternalTupleTagMap);
-    }
-
-    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
deleted file mode 100644
index ad8f85f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ParDoBoundTranslator.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TupleTag;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Translates a ParDo.Bound to a JStorm {@link DoFnExecutor}.
- */
-public class ParDoBoundTranslator<InputT, OutputT>
-    extends TransformTranslator.Default<ParDo.SingleOutput<InputT, OutputT>> {
-
-  private static final Logger LOG = LoggerFactory.getLogger(ParDoBoundTranslator.class);
-
-  @Override
-  public void translateNode(
-      ParDo.SingleOutput<InputT, OutputT> transform, TranslationContext context) {
-    final TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    final TupleTag<?> inputTag = userGraphContext.getInputTag();
-    PCollection<InputT> input = (PCollection<InputT>) userGraphContext.getInput();
-
-    TupleTag<OutputT> mainOutputTag = (TupleTag<OutputT>) userGraphContext.getOutputTag();
-    List<TupleTag<?>> sideOutputTags = Lists.newArrayList();
-
-    Map<TupleTag<?>, PValue> allInputs =
-        avro.shaded.com.google.common.collect.Maps.newHashMap(userGraphContext.getInputs());
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      allInputs.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-    String description = describeTransform(
-        transform,
-        allInputs,
-        userGraphContext.getOutputs());
-
-    ImmutableMap.Builder<TupleTag, PCollectionView<?>> sideInputTagToView = ImmutableMap.builder();
-    for (PCollectionView pCollectionView : transform.getSideInputs()) {
-      sideInputTagToView.put(userGraphContext.findTupleTag(pCollectionView), pCollectionView);
-    }
-
-    DoFnExecutor executor;
-    DoFnSignature signature = DoFnSignatures.getSignature(transform.getFn().getClass());
-    if (signature.stateDeclarations().size() > 0
-        || signature.timerDeclarations().size() > 0) {
-      executor = new StatefulDoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          (DoFn<KV, OutputT>) transform.getFn(),
-          (Coder) WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          (TupleTag<KV>) inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags);
-    } else {
-      executor = new DoFnExecutor<>(
-          userGraphContext.getStepName(),
-          description,
-          userGraphContext.getOptions(),
-          transform.getFn(),
-          WindowedValue.getFullCoder(
-              input.getCoder(), input.getWindowingStrategy().getWindowFn().windowCoder()),
-          input.getWindowingStrategy(),
-          (TupleTag<InputT>) inputTag,
-          transform.getSideInputs(),
-          sideInputTagToView.build(),
-          mainOutputTag,
-          sideOutputTags);
-    }
-
-    context.addTransformExecutor(executor, ImmutableList.<PValue>copyOf(transform.getSideInputs()));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
deleted file mode 100644
index 71243b9..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/Stream.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.google.auto.value.AutoValue;
-import java.util.List;
-import javax.annotation.Nullable;
-
-/**
- * Class that defines the stream connection between upstream and downstream components.
- */
-@AutoValue
-public abstract class Stream {
-
-  public abstract Producer getProducer();
-
-  public abstract Consumer getConsumer();
-
-  public static Stream of(Producer producer, Consumer consumer) {
-    return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream(
-        producer, consumer);
-  }
-
-  /**
-   * JStorm producer.
-   */
-  @AutoValue
-  public abstract static class Producer {
-    public abstract String getComponentId();
-
-    public abstract String getStreamId();
-
-    public abstract String getStreamName();
-
-    public static Producer of(String componentId, String streamId, String streamName) {
-      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Producer(
-          componentId, streamId, streamName);
-    }
-  }
-
-  /**
-   * JStorm consumer.
-   */
-  @AutoValue
-  public abstract static class Consumer {
-    public abstract String getComponentId();
-
-    public abstract Grouping getGrouping();
-
-    public static Consumer of(String componentId, Grouping grouping) {
-      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Consumer(
-          componentId, grouping);
-    }
-  }
-
-  /**
-   * JStorm grouping, which define how to transfer message between two nodes.
-   */
-  @AutoValue
-  public abstract static class Grouping {
-    public abstract Type getType();
-
-    @Nullable
-    public abstract List<String> getFields();
-
-    public static Grouping of(Type type) {
-      checkArgument(!Type.FIELDS.equals(type), "Fields grouping should provide key fields.");
-      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
-          type, null /* fields */);
-    }
-
-    public static Grouping byFields(List<String> fields) {
-      checkNotNull(fields, "fields");
-      checkArgument(!fields.isEmpty(), "No key fields were provided for field grouping!");
-      return new org.apache.beam.runners.jstorm.translation.translator.AutoValue_Stream_Grouping(
-          Type.FIELDS, fields);
-    }
-
-    /**
-     * Types of stream groupings Storm allows.
-     */
-    public enum Type {
-      ALL, CUSTOM, DIRECT, SHUFFLE, LOCAL_OR_SHUFFLE, FIELDS, GLOBAL, NONE
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
deleted file mode 100644
index bfa94a0..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/TransformTranslator.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.FluentIterable;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Interface for classes capable of tranforming Beam PTransforms into Storm primitives.
- */
-public interface TransformTranslator<T extends PTransform<?, ?>> {
-
-  void translateNode(T transform, TranslationContext context);
-
-  /**
-   * Returns true if this translator can translate the given transform.
-   */
-  boolean canTranslate(T transform, TranslationContext context);
-
-    /**
-     * Default translator.
-     * @param <T1>
-     */
-  class Default<T1 extends PTransform<?, ?>> implements TransformTranslator<T1> {
-    @Override
-    public void translateNode(T1 transform, TranslationContext context) {
-
-    }
-
-    @Override
-    public boolean canTranslate(T1 transform, TranslationContext context) {
-      return true;
-    }
-
-    static String describeTransform(
-        PTransform<?, ?> transform,
-        Map<TupleTag<?>, PValue> inputs,
-        Map<TupleTag<?>, PValue> outputs) {
-      return String.format("%s --> %s --> %s",
-          Joiner.on('+').join(FluentIterable.from(inputs.entrySet())
-              .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                @Override
-                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPValue) {
-                  return taggedPValue.getKey().getId();
-                  // return taggedPValue.getValue().getName();
-                }
-              })),
-          transform.getName(),
-          Joiner.on('+').join(FluentIterable.from(outputs.entrySet())
-              .transform(new Function<Map.Entry<TupleTag<?>, PValue>, String>() {
-                @Override
-                public String apply(Map.Entry<TupleTag<?>, PValue> taggedPvalue) {
-                  return taggedPvalue.getKey().getId();
-                  //return taggedPValue.getValue().getName();
-                }
-              })));
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
deleted file mode 100644
index 33ac024..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/UnboundedSourceTranslator.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.values.PValue;
-import org.apache.beam.sdk.values.TaggedPValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Translates a Read.Unbounded into a Storm spout.
- *
- * @param <T>
- */
-public class UnboundedSourceTranslator<T> extends TransformTranslator.Default<Read.Unbounded<T>> {
-  public void translateNode(Read.Unbounded<T> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    String description =
-        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-
-    TupleTag<?> tag = userGraphContext.getOutputTag();
-    PValue output = userGraphContext.getOutput();
-
-    UnboundedSourceSpout spout = new UnboundedSourceSpout(
-        description,
-        transform.getSource(), userGraphContext.getOptions(), tag);
-    context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(tag, output));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
deleted file mode 100644
index f71ee9c..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ViewTranslator.java
+++ /dev/null
@@ -1,380 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.ViewExecutor;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderRegistry;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.ListCoder;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.View;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.PCollectionViews;
-
-/**
- * A {@link TransformTranslator} for executing {@link View Views} in JStorm runner.
- */
-public class ViewTranslator
-    extends TransformTranslator.Default<ViewTranslator.CreateJStormPCollectionView<?, ?>> {
-  @Override
-  public void translateNode(
-      CreateJStormPCollectionView<?, ?> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    String description = describeTransform(
-        transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-    ViewExecutor viewExecutor = new ViewExecutor(description, userGraphContext.getOutputTag());
-    context.addTransformExecutor(viewExecutor);
-  }
-
-  /**
-   * Specialized implementation for
-   * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap}.
-   */
-  public static class ViewAsMap<K, V>
-      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
-
-    @SuppressWarnings("unused") // used via reflection in JstormRunner#apply()
-    public ViewAsMap(View.AsMap<K, V> transform) {
-    }
-
-    @Override
-    public PCollectionView<Map<K, V>> expand(PCollection<KV<K, V>> input) {
-      PCollectionView<Map<K, V>> view =
-          PCollectionViews.mapView(
-              input,
-              input.getWindowingStrategy(),
-              input.getCoder());
-
-      @SuppressWarnings({"rawtypes", "unchecked"})
-      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-      try {
-        inputCoder.getKeyCoder().verifyDeterministic();
-      } catch (Coder.NonDeterministicException e) {
-        // TODO: log warning as other runners.
-      }
-
-      return input
-          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, V>>of(view));
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingViewAsMap";
-    }
-  }
-
-  /**
-   * Specialized expansion for {@link
-   * View.AsMultimap View.AsMultimap}.
-   */
-  public static class ViewAsMultimap<K, V>
-      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
-
-    /**
-     * Builds an instance of this class from the overridden transform.
-     */
-    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-    public ViewAsMultimap(View.AsMultimap<K, V> transform) {
-    }
-
-    @Override
-    public PCollectionView<Map<K, Iterable<V>>> expand(PCollection<KV<K, V>> input) {
-      PCollectionView<Map<K, Iterable<V>>> view =
-          PCollectionViews.multimapView(
-              input,
-              input.getWindowingStrategy(),
-              input.getCoder());
-
-      @SuppressWarnings({"rawtypes", "unchecked"})
-      KvCoder<K, V> inputCoder = (KvCoder) input.getCoder();
-      try {
-        inputCoder.getKeyCoder().verifyDeterministic();
-      } catch (Coder.NonDeterministicException e) {
-        // TODO: log warning as other runners.
-      }
-
-      return input
-          .apply(Combine.globally(new Concatenate<KV<K, V>>()).withoutDefaults())
-          .apply(CreateJStormPCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(view));
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingViewAsMultimap";
-    }
-  }
-
-  /**
-   * Specialized implementation for
-   * {@link View.AsList View.AsList}.
-   */
-  public static class ViewAsList<T>
-      extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
-    /**
-     * Builds an instance of this class from the overridden transform.
-     */
-    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-    public ViewAsList(View.AsList<T> transform) {
-    }
-
-    @Override
-    public PCollectionView<List<T>> expand(PCollection<T> input) {
-      PCollectionView<List<T>> view =
-          PCollectionViews.listView(
-              input,
-              input.getWindowingStrategy(),
-              input.getCoder());
-
-      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-          .apply(CreateJStormPCollectionView.<T, List<T>>of(view));
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingViewAsList";
-    }
-  }
-
-  /**
-   * Specialized implementation for
-   * {@link View.AsIterable View.AsIterable} for the
-   * JStorm runner in streaming mode.
-   */
-  public static class ViewAsIterable<T>
-      extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
-    /**
-     * Builds an instance of this class from the overridden transform.
-     */
-    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-    public ViewAsIterable(View.AsIterable<T> transform) {
-    }
-
-    @Override
-    public PCollectionView<Iterable<T>> expand(PCollection<T> input) {
-      PCollectionView<Iterable<T>> view =
-          PCollectionViews.iterableView(
-              input,
-              input.getWindowingStrategy(),
-              input.getCoder());
-
-      return input.apply(Combine.globally(new Concatenate<T>()).withoutDefaults())
-          .apply(CreateJStormPCollectionView.<T, Iterable<T>>of(view));
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingViewAsIterable";
-    }
-  }
-
-  /**
-   * Specialized expansion for
-   * {@link View.AsSingleton View.AsSingleton} for the
-   * JStorm runner in streaming mode.
-   */
-  public static class ViewAsSingleton<T>
-      extends PTransform<PCollection<T>, PCollectionView<T>> {
-    private View.AsSingleton<T> transform;
-
-    /**
-     * Builds an instance of this class from the overridden transform.
-     */
-    @SuppressWarnings("unused") // used via reflection in JStormRunner#apply()
-    public ViewAsSingleton(View.AsSingleton<T> transform) {
-      this.transform = transform;
-    }
-
-    @Override
-    public PCollectionView<T> expand(PCollection<T> input) {
-      Combine.Globally<T, T> combine = Combine.globally(
-          new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue()));
-      if (!transform.hasDefaultValue()) {
-        combine = combine.withoutDefaults();
-      }
-      return input.apply(combine.asSingletonView());
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingViewAsSingleton";
-    }
-
-    private static class SingletonCombine<T> extends Combine.BinaryCombineFn<T> {
-      private boolean hasDefaultValue;
-      private T defaultValue;
-
-      SingletonCombine(boolean hasDefaultValue, T defaultValue) {
-        this.hasDefaultValue = hasDefaultValue;
-        this.defaultValue = defaultValue;
-      }
-
-      @Override
-      public T apply(T left, T right) {
-        throw new IllegalArgumentException("PCollection with more than one element "
-            + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to "
-            + "combine the PCollection into a single value");
-      }
-
-      @Override
-      public T identity() {
-        if (hasDefaultValue) {
-          return defaultValue;
-        } else {
-          throw new IllegalArgumentException(
-              "Empty PCollection accessed as a singleton view. "
-                  + "Consider setting withDefault to provide a default value");
-        }
-      }
-    }
-  }
-
-  /**
-   * Specialized expansion for
-   * {@link org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView}.
-   * @param <InputT>
-   * @param <OutputT>
-     */
-  public static class CombineGloballyAsSingletonView<InputT, OutputT>
-      extends PTransform<PCollection<InputT>, PCollectionView<OutputT>> {
-    Combine.GloballyAsSingletonView<InputT, OutputT> transform;
-
-    /**
-     * Builds an instance of this class from the overridden transform.
-     */
-    @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply()
-    public CombineGloballyAsSingletonView(
-        Combine.GloballyAsSingletonView<InputT, OutputT> transform) {
-      this.transform = transform;
-    }
-
-    @Override
-    public PCollectionView<OutputT> expand(PCollection<InputT> input) {
-      PCollection<OutputT> combined =
-          input.apply(Combine.globally(transform.getCombineFn())
-              .withoutDefaults()
-              .withFanout(transform.getFanout()));
-
-      PCollectionView<OutputT> view = PCollectionViews.singletonView(
-          combined,
-          combined.getWindowingStrategy(),
-          transform.getInsertDefault(),
-          transform.getInsertDefault()
-              ? transform.getCombineFn().defaultValue() : null,
-          combined.getCoder());
-      return combined
-          .apply(ParDo.of(new WrapAsList<OutputT>()))
-          .apply(CreateJStormPCollectionView.<OutputT, OutputT>of(view));
-    }
-
-    @Override
-    protected String getKindString() {
-      return "StreamingCombineGloballyAsSingletonView";
-    }
-  }
-
-  private static class WrapAsList<T> extends DoFn<T, List<T>> {
-    @ProcessElement
-    public void processElement(ProcessContext c) {
-      c.output(Collections.singletonList(c.element()));
-    }
-  }
-
-  /**
-   * Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
-   * They require the input {@link PCollection} fits in memory.
-   * For a large {@link PCollection} this is expected to crash!
-   *
-   * @param <T> the type of elements to concatenate.
-   */
-  private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public List<T> createAccumulator() {
-      return new ArrayList<>();
-    }
-
-    @Override
-    public List<T> addInput(List<T> accumulator, T input) {
-      accumulator.add(input);
-      return accumulator;
-    }
-
-    @Override
-    public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
-      List<T> result = createAccumulator();
-      for (List<T> accumulator : accumulators) {
-        result.addAll(accumulator);
-      }
-      return result;
-    }
-
-    @Override
-    public List<T> extractOutput(List<T> accumulator) {
-      return accumulator;
-    }
-
-    @Override
-    public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
-      return ListCoder.of(inputCoder);
-    }
-
-    @Override
-    public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
-      return ListCoder.of(inputCoder);
-    }
-  }
-
-  /**
-   * Creates a primitive {@link PCollectionView}.
-   * For internal use only by runner implementors.
-   *
-   * @param <ElemT> The type of the elements of the input PCollection
-   * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
-   */
-  public static class CreateJStormPCollectionView<ElemT, ViewT>
-      extends PTransform<PCollection<List<ElemT>>, PCollectionView<ViewT>> {
-    private PCollectionView<ViewT> view;
-
-    private CreateJStormPCollectionView(PCollectionView<ViewT> view) {
-      this.view = view;
-    }
-
-    public static <ElemT, ViewT> CreateJStormPCollectionView<ElemT, ViewT> of(
-        PCollectionView<ViewT> view) {
-      return new CreateJStormPCollectionView<>(view);
-    }
-
-    @Override
-    public PCollectionView<ViewT> expand(PCollection<List<ElemT>> input) {
-      return view;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
deleted file mode 100644
index 2ccb8d7..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowAssignTranslator.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.runners.jstorm.translation.runtime.WindowAssignExecutor;
-import org.apache.beam.sdk.transforms.windowing.Window;
-
-/**
- * Translates a {@link org.apache.beam.sdk.transforms.windowing.Window.Assign} to a
- * JStorm {@link WindowAssignExecutor}.
- * @param <T>
- */
-public class WindowAssignTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
-
-  @Override
-  public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-    String description =
-        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
-    context.getUserGraphContext().setWindowed();
-    WindowAssignExecutor executor = new WindowAssignExecutor(
-        description,
-        transform.getWindowFn(),
-        userGraphContext.getOutputTag());
-    context.addTransformExecutor(executor);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
deleted file mode 100644
index 4b92a4c..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/CommonInstance.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.util;
-
-/**
- * Common definition of JStorm runner.
- */
-public class CommonInstance {
-  public static final String KEY = "Key";
-  public static final String VALUE = "Value";
-
-  public static final String BEAM_WATERMARK_STREAM_ID = "BEAM_WATERMARK";
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
deleted file mode 100644
index 4eb1d8f..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultStepContext.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.util;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import java.io.IOException;
-import org.apache.beam.runners.core.ExecutionContext;
-import org.apache.beam.runners.core.StateInternals;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.TupleTag;
-
-/**
- * Default StepContext for running DoFn This does not allow accessing state or timer internals.
- */
-public class DefaultStepContext implements ExecutionContext.StepContext {
-
-  private TimerInternals timerInternals;
-
-  private StateInternals stateInternals;
-
-  public DefaultStepContext(TimerInternals timerInternals, StateInternals stateInternals) {
-    this.timerInternals = checkNotNull(timerInternals, "timerInternals");
-    this.stateInternals = checkNotNull(stateInternals, "stateInternals");
-  }
-
-  @Override
-  public String getStepName() {
-    return null;
-  }
-
-  @Override
-  public String getTransformName() {
-    return null;
-  }
-
-  @Override
-  public void noteOutput(WindowedValue<?> windowedValue) {
-
-  }
-
-  @Override
-  public void noteOutput(TupleTag<?> tupleTag, WindowedValue<?> windowedValue) {
-
-  }
-
-  @Override
-  public <T, W extends BoundedWindow> void writePCollectionViewData(
-      TupleTag<?> tag, Iterable<WindowedValue<T>> data,
-      Coder<Iterable<WindowedValue<T>>> dataCoder, W window, Coder<W> windowCoder)
-      throws IOException {
-    throw new UnsupportedOperationException("Writing side-input data is not supported.");
-  }
-
-  @Override
-  public StateInternals stateInternals() {
-    return stateInternals;
-  }
-
-  @Override
-  public TimerInternals timerInternals() {
-    return timerInternals;
-  }
-
-  public void setStateInternals(StateInternals stateInternals) {
-    this.stateInternals = stateInternals;
-  }
-
-  public void setTimerInternals(TimerInternals timerInternals) {
-    this.timerInternals = timerInternals;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
deleted file mode 100644
index ad83c2b..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/RunnerUtils.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.util;
-
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.jstorm.translation.runtime.Executor;
-import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.MultiStatefulDoFnExecutor;
-import org.apache.beam.runners.jstorm.translation.runtime.StatefulDoFnExecutor;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-
-/**
- * Utils for JStorm runner.
- */
-public class RunnerUtils {
-  /**
-   * Convert {@link WindowedValue} into {@link KeyedWorkItem}.
-   * @param elem
-   * @return
-   */
-  public static <K, V> KeyedWorkItem<K, V> toKeyedWorkItem(WindowedValue<KV<K, V>> elem) {
-    WindowedValue<KV<K, V>> kvElem = (WindowedValue<KV<K, V>>) elem;
-    SingletonKeyedWorkItem<K, V> workItem = SingletonKeyedWorkItem.of(
-        kvElem.getValue().getKey(),
-        kvElem.withValue(kvElem.getValue().getValue()));
-    return workItem;
-  }
-
-  public static boolean isGroupByKeyExecutor(Executor executor) {
-    if (executor instanceof GroupByWindowExecutor) {
-      return true;
-    } else if (executor instanceof StatefulDoFnExecutor
-            || executor instanceof MultiStatefulDoFnExecutor) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
deleted file mode 100644
index 479afdc..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SerializedPipelineOptions.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.runners.jstorm.util;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import org.apache.beam.sdk.options.PipelineOptions;
-
-/**
- * Encapsulates the PipelineOptions in serialized form to ship them to the cluster.
- */
-public class SerializedPipelineOptions implements Serializable {
-
-  private final byte[] serializedOptions;
-
-  /**
-   * Lazily initialized copy of deserialized options.
-   */
-  private transient PipelineOptions pipelineOptions;
-
-  public SerializedPipelineOptions(PipelineOptions options) {
-    checkNotNull(options, "PipelineOptions must not be null.");
-
-    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
-      new ObjectMapper().writeValue(baos, options);
-      this.serializedOptions = baos.toByteArray();
-    } catch (Exception e) {
-      throw new RuntimeException("Couldn't serialize PipelineOptions.", e);
-    }
-
-  }
-
-  public PipelineOptions getPipelineOptions() {
-    if (pipelineOptions == null) {
-      try {
-        pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
-      } catch (IOException e) {
-        throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
-      }
-    }
-
-    return pipelineOptions;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
deleted file mode 100644
index 46a12b9..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/util/SingletonKeyedWorkItem.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.util;
-
-import java.util.Collections;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.sdk.util.WindowedValue;
-
-/**
- * Singleton keyed word item.
- * @param <K>
- * @param <ElemT>
- */
-public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
-
-  final K key;
-  final WindowedValue<ElemT> value;
-
-  private SingletonKeyedWorkItem(K key, WindowedValue<ElemT> value) {
-    this.key = key;
-    this.value = value;
-  }
-
-  public static <K, ElemT> SingletonKeyedWorkItem<K, ElemT> of(K key, WindowedValue<ElemT> value) {
-    return new SingletonKeyedWorkItem<K, ElemT>(key, value);
-  }
-
-  @Override
-  public K key() {
-    return key;
-  }
-
-  public WindowedValue<ElemT> value() {
-    return value;
-  }
-
-  @Override
-  public Iterable<TimerInternals.TimerData> timersIterable() {
-    return Collections.EMPTY_LIST;
-  }
-
-  @Override
-  public Iterable<WindowedValue<ElemT>> elementsIterable() {
-    return Collections.singletonList(value);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
new file mode 100644
index 0000000..b2ca267
--- /dev/null
+++ b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/JStormStateInternalsTest.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.jstorm.translation;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+import com.alibaba.jstorm.cache.IKvStoreManager;
+import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import com.google.common.collect.Maps;
+import java.util.Iterator;
+import java.util.Map;
+import org.apache.beam.runners.core.StateNamespaces;
+import org.apache.beam.runners.core.StateTags;
+import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.state.BagState;
+import org.apache.beam.sdk.state.CombiningState;
+import org.apache.beam.sdk.state.MapState;
+import org.apache.beam.sdk.state.ValueState;
+import org.apache.beam.sdk.state.WatermarkHoldState;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
+import org.joda.time.Instant;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/**
+ * Tests for {@link JStormStateInternals}.
+ */
+@RunWith(JUnit4.class)
+public class JStormStateInternalsTest {
+
+  @Rule
+  public final TemporaryFolder tmp = new TemporaryFolder();
+
+  private JStormStateInternals<String> jstormStateInternals;
+
+  @Before
+  public void setup() throws Exception {
+    IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
+        Maps.newHashMap(),
+        "test",
+        tmp.toString(),
+        new KryoSerializer(Maps.newHashMap()));
+    jstormStateInternals = new JStormStateInternals(
+        "key-1", kvStoreManager, new TimerServiceImpl(), 0);
+  }
+
+  @Test
+  public void testValueState() throws Exception {
+    ValueState<Integer> valueState = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+    valueState.write(Integer.MIN_VALUE);
+    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+    valueState.write(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+  }
+
+  @Test
+  public void testValueStateIdenticalId() throws Exception {
+    ValueState<Integer> valueState = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+    ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
+
+    valueState.write(Integer.MIN_VALUE);
+    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
+    assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
+    valueState.write(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
+    assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
+  }
+
+  @Test
+  public void testBagState() throws Exception {
+    BagState<Integer> bagStateA = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+    BagState<Integer> bagStateB = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
+
+    bagStateA.add(1);
+    bagStateA.add(0);
+    bagStateA.add(Integer.MAX_VALUE);
+
+    bagStateB.add(0);
+    bagStateB.add(Integer.MIN_VALUE);
+
+    Iterable<Integer> bagA = bagStateA.read();
+    Iterable<Integer> bagB = bagStateB.read();
+    assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
+    assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
+
+    bagStateA.clear();
+    bagStateA.add(1);
+    bagStateB.add(0);
+    assertThat(bagStateA.read(), containsInAnyOrder(1));
+    assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
+  }
+
+  @Test
+  public void testCombiningState() throws Exception {
+    Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
+    Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
+        CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
+
+    CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.combiningValue(
+            "state-id-a",
+            accumCoder,
+            combineFn));
+    assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
+    combiningState.add(10);
+    assertEquals(10, combiningState.read().longValue());
+    combiningState.add(1);
+    assertEquals(10, combiningState.read().longValue());
+    combiningState.add(Integer.MAX_VALUE);
+    assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
+  }
+
+  @Test
+  public void testWatermarkHoldState() throws Exception {
+    WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.watermarkStateInternal(
+            "state-id-a",
+            TimestampCombiner.EARLIEST));
+    watermarkHoldState.add(new Instant(1));
+    assertEquals(1, watermarkHoldState.read().getMillis());
+    watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
+    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+    watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
+    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
+  }
+
+  @Test
+  public void testMapState() throws Exception {
+    MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
+        StateNamespaces.global(),
+        StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
+    mapStateA.put(1, 1);
+    mapStateA.put(2, 22);
+    mapStateA.put(1, 12);
+
+    Iterable<Integer> keys = mapStateA.keys().read();
+    Iterable<Integer> values = mapStateA.values().read();
+    assertThat(keys, containsInAnyOrder(1, 2));
+    assertThat(values, containsInAnyOrder(12, 22));
+
+    Iterable<Map.Entry<Integer, Integer>> entries = mapStateA.entries().read();
+    Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
+    Map.Entry<Integer, Integer> entry = itr.next();
+    assertEquals((long) entry.getKey(), 1L);
+    assertEquals((long) entry.getValue(), 12L);
+    entry = itr.next();
+    assertEquals((long) entry.getKey(), 2L);
+    assertEquals((long) entry.getValue(), 22L);
+    assertEquals(false, itr.hasNext());
+
+    mapStateA.remove(1);
+    keys = mapStateA.keys().read();
+    values = mapStateA.values().read();
+    assertThat(keys, containsInAnyOrder(2));
+    assertThat(values, containsInAnyOrder(22));
+
+    entries = mapStateA.entries().read();
+    itr = entries.iterator();
+    entry = itr.next();
+    assertEquals((long) entry.getKey(), 2L);
+    assertEquals((long) entry.getValue(), 22L);
+    assertEquals(false, itr.hasNext());
+  }
+
+  @Test
+  public void testMassiveDataOfBagState() {
+    BagState<Integer> bagStateA = jstormStateInternals.state(
+        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
+
+    int count = 10000;
+    int n = 1;
+    while (n <= count) {
+      bagStateA.add(n);
+      n++;
+    }
+
+    int readCount = 0;
+    int readN = 0;
+    Iterator<Integer> itr = bagStateA.read().iterator();
+    while (itr.hasNext()) {
+      readN += itr.next();
+      readCount++;
+    }
+
+    assertEquals((long) readN, ((1 + count) * count) / 2);
+    assertEquals((long) readCount, count);
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/82653534/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
deleted file mode 100644
index 66f33a7..0000000
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternalsTest.java
+++ /dev/null
@@ -1,222 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.runtime.state;
-
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-import com.alibaba.jstorm.cache.IKvStoreManager;
-import com.alibaba.jstorm.cache.rocksdb.RocksDbKvStoreManagerFactory;
-import com.alibaba.jstorm.utils.KryoSerializer;
-import com.google.common.collect.Maps;
-import java.util.Iterator;
-import java.util.Map;
-import org.apache.beam.runners.core.StateNamespaces;
-import org.apache.beam.runners.core.StateTags;
-import org.apache.beam.runners.jstorm.translation.runtime.TimerServiceImpl;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderRegistry;
-import org.apache.beam.sdk.state.BagState;
-import org.apache.beam.sdk.state.CombiningState;
-import org.apache.beam.sdk.state.MapState;
-import org.apache.beam.sdk.state.ValueState;
-import org.apache.beam.sdk.state.WatermarkHoldState;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.Max;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.joda.time.Instant;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/**
- * Tests for {@link JStormStateInternals}.
- */
-@RunWith(JUnit4.class)
-public class JStormStateInternalsTest {
-
-  @Rule
-  public final TemporaryFolder tmp = new TemporaryFolder();
-
-  private JStormStateInternals<String> jstormStateInternals;
-
-  @Before
-  public void setup() throws Exception {
-    IKvStoreManager kvStoreManager = RocksDbKvStoreManagerFactory.getManager(
-        Maps.newHashMap(),
-        "test",
-        tmp.toString(),
-        new KryoSerializer(Maps.newHashMap()));
-    jstormStateInternals = new JStormStateInternals(
-        "key-1", kvStoreManager, new TimerServiceImpl(), 0);
-  }
-
-  @Test
-  public void testValueState() throws Exception {
-    ValueState<Integer> valueState = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-    valueState.write(Integer.MIN_VALUE);
-    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-    valueState.write(Integer.MAX_VALUE);
-    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-  }
-
-  @Test
-  public void testValueStateIdenticalId() throws Exception {
-    ValueState<Integer> valueState = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-    ValueState<Integer> valueStateIdentical = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.value("state-id-a", BigEndianIntegerCoder.of()));
-
-    valueState.write(Integer.MIN_VALUE);
-    assertEquals(Integer.MIN_VALUE, valueState.read().longValue());
-    assertEquals(Integer.MIN_VALUE, valueStateIdentical.read().longValue());
-    valueState.write(Integer.MAX_VALUE);
-    assertEquals(Integer.MAX_VALUE, valueState.read().longValue());
-    assertEquals(Integer.MAX_VALUE, valueStateIdentical.read().longValue());
-  }
-
-  @Test
-  public void testBagState() throws Exception {
-    BagState<Integer> bagStateA = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-    BagState<Integer> bagStateB = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.bag("state-id-b", BigEndianIntegerCoder.of()));
-
-    bagStateA.add(1);
-    bagStateA.add(0);
-    bagStateA.add(Integer.MAX_VALUE);
-
-    bagStateB.add(0);
-    bagStateB.add(Integer.MIN_VALUE);
-
-    Iterable<Integer> bagA = bagStateA.read();
-    Iterable<Integer> bagB = bagStateB.read();
-    assertThat(bagA, containsInAnyOrder(1, 0, Integer.MAX_VALUE));
-    assertThat(bagB, containsInAnyOrder(0, Integer.MIN_VALUE));
-
-    bagStateA.clear();
-    bagStateA.add(1);
-    bagStateB.add(0);
-    assertThat(bagStateA.read(), containsInAnyOrder(1));
-    assertThat(bagStateB.read(), containsInAnyOrder(0, 0, Integer.MIN_VALUE));
-  }
-
-  @Test
-  public void testCombiningState() throws Exception {
-    Combine.CombineFn<Integer, int[], Integer> combineFn = Max.ofIntegers();
-    Coder<int[]> accumCoder = combineFn.getAccumulatorCoder(
-        CoderRegistry.createDefault(), BigEndianIntegerCoder.of());
-
-    CombiningState<Integer, int[], Integer> combiningState = jstormStateInternals.state(
-        StateNamespaces.global(),
-        StateTags.combiningValue(
-            "state-id-a",
-            accumCoder,
-            combineFn));
-    assertEquals(Integer.MIN_VALUE, combiningState.read().longValue());
-    combiningState.add(10);
-    assertEquals(10, combiningState.read().longValue());
-    combiningState.add(1);
-    assertEquals(10, combiningState.read().longValue());
-    combiningState.add(Integer.MAX_VALUE);
-    assertEquals(Integer.MAX_VALUE, combiningState.read().longValue());
-  }
-
-  @Test
-  public void testWatermarkHoldState() throws Exception {
-    WatermarkHoldState watermarkHoldState = jstormStateInternals.state(
-        StateNamespaces.global(),
-        StateTags.watermarkStateInternal(
-            "state-id-a",
-            TimestampCombiner.EARLIEST));
-    watermarkHoldState.add(new Instant(1));
-    assertEquals(1, watermarkHoldState.read().getMillis());
-    watermarkHoldState.add(new Instant(Integer.MIN_VALUE));
-    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-    watermarkHoldState.add(new Instant(Integer.MAX_VALUE));
-    assertEquals(Integer.MIN_VALUE, watermarkHoldState.read().getMillis());
-  }
-
-  @Test
-  public void testMapState() throws Exception {
-    MapState<Integer, Integer> mapStateA = jstormStateInternals.state(
-        StateNamespaces.global(),
-        StateTags.map("state-id-a", BigEndianIntegerCoder.of(), BigEndianIntegerCoder.of()));
-    mapStateA.put(1, 1);
-    mapStateA.put(2, 22);
-    mapStateA.put(1, 12);
-
-    Iterable<Integer> keys = mapStateA.keys().read();
-    Iterable<Integer> values = mapStateA.values().read();
-    assertThat(keys, containsInAnyOrder(1, 2));
-    assertThat(values, containsInAnyOrder(12, 22));
-
-    Iterable<Map.Entry<Integer, Integer>> entries = mapStateA.entries().read();
-    Iterator<Map.Entry<Integer, Integer>> itr = entries.iterator();
-    Map.Entry<Integer, Integer> entry = itr.next();
-    assertEquals((long) entry.getKey(), 1L);
-    assertEquals((long) entry.getValue(), 12L);
-    entry = itr.next();
-    assertEquals((long) entry.getKey(), 2L);
-    assertEquals((long) entry.getValue(), 22L);
-    assertEquals(false, itr.hasNext());
-
-    mapStateA.remove(1);
-    keys = mapStateA.keys().read();
-    values = mapStateA.values().read();
-    assertThat(keys, containsInAnyOrder(2));
-    assertThat(values, containsInAnyOrder(22));
-
-    entries = mapStateA.entries().read();
-    itr = entries.iterator();
-    entry = itr.next();
-    assertEquals((long) entry.getKey(), 2L);
-    assertEquals((long) entry.getValue(), 22L);
-    assertEquals(false, itr.hasNext());
-  }
-
-  @Test
-  public void testMassiveDataOfBagState() {
-    BagState<Integer> bagStateA = jstormStateInternals.state(
-        StateNamespaces.global(), StateTags.bag("state-id-a", BigEndianIntegerCoder.of()));
-
-    int count = 10000;
-    int n = 1;
-    while (n <= count) {
-      bagStateA.add(n);
-      n++;
-    }
-
-    int readCount = 0;
-    int readN = 0;
-    Iterator<Integer> itr = bagStateA.read().iterator();
-    while (itr.hasNext()) {
-      readN += itr.next();
-      readCount++;
-    }
-
-    assertEquals((long) readN, ((1 + count) * count) / 2);
-    assertEquals((long) readCount, count);
-  }
-}


[12/53] [abbrv] beam git commit: jstorm-runner: fix checkstyles.

Posted by pe...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
index 2d80617..7f98c61 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/UnboundedSourceSpout.java
@@ -17,177 +17,175 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.tuple.Values;
+import com.alibaba.jstorm.utils.KryoSerializer;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.beam.runners.jstorm.JStormPipelineOptions;
 import org.apache.beam.runners.jstorm.translation.util.CommonInstance;
-import com.alibaba.jstorm.utils.KryoSerializer;
+import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
 import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
-
-import org.apache.beam.runners.jstorm.util.SerializedPipelineOptions;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Values;
-
 import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * Spout implementation that wraps a Beam UnboundedSource
- *
+ * <p>
  * TODO: add wrapper to support metrics in UnboundedSource.
  */
 public class UnboundedSourceSpout extends AdaptorBasicSpout {
-    private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
-
-    private final String description;
-    private final UnboundedSource source;
-    private final SerializedPipelineOptions serializedOptions;
-    private final TupleTag<?> outputTag;
-
-    private transient JStormPipelineOptions pipelineOptions;
-    private transient UnboundedSource.UnboundedReader reader;
-    private transient SpoutOutputCollector collector;
-
-    private volatile boolean hasNextRecord;
-    private AtomicBoolean activated = new AtomicBoolean();
-
-    private KryoSerializer<WindowedValue> serializer;
-
-    private long lastWaterMark = 0l;
-
-    public UnboundedSourceSpout(
-            String description,
-            UnboundedSource source,
-            JStormPipelineOptions options,
-            TupleTag<?> outputTag) {
-        this.description = checkNotNull(description, "description");
-        this.source = checkNotNull(source, "source");
-        this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
-        this.outputTag = checkNotNull(outputTag, "outputTag");
-    }
-
-    @Override
-    public synchronized void close() {
-        try {
-            activated.set(false);
-            this.reader.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public void activate() {
-        activated.set(true);
-        
-    }
-
-    @Override
-    public void deactivate() {
-        activated.set(false);
-    }
-
-    @Override
-    public void ack(Object msgId) {
-        throw new UnsupportedOperationException();
+  private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceSpout.class);
+
+  private final String description;
+  private final UnboundedSource source;
+  private final SerializedPipelineOptions serializedOptions;
+  private final TupleTag<?> outputTag;
+
+  private transient JStormPipelineOptions pipelineOptions;
+  private transient UnboundedSource.UnboundedReader reader;
+  private transient SpoutOutputCollector collector;
+
+  private volatile boolean hasNextRecord;
+  private AtomicBoolean activated = new AtomicBoolean();
+
+  private KryoSerializer<WindowedValue> serializer;
+
+  private long lastWaterMark = 0l;
+
+  public UnboundedSourceSpout(
+      String description,
+      UnboundedSource source,
+      JStormPipelineOptions options,
+      TupleTag<?> outputTag) {
+    this.description = checkNotNull(description, "description");
+    this.source = checkNotNull(source, "source");
+    this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options, "options"));
+    this.outputTag = checkNotNull(outputTag, "outputTag");
+  }
+
+  @Override
+  public synchronized void close() {
+    try {
+      activated.set(false);
+      this.reader.close();
+    } catch (IOException e) {
+      e.printStackTrace();
     }
-
-    @Override
-    public void fail(Object msgId) {
-        throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void activate() {
+    activated.set(true);
+
+  }
+
+  @Override
+  public void deactivate() {
+    activated.set(false);
+  }
+
+  @Override
+  public void ack(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void fail(Object msgId) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Map<String, Object> getComponentConfiguration() {
+    return null;
+  }
+
+  @Override
+  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+    try {
+      this.collector = collector;
+      this.pipelineOptions =
+          this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
+
+      createSourceReader(null);
+
+      this.serializer = new KryoSerializer<>(conf);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to create unbounded reader.", e);
     }
+  }
 
-    @Override
-    public Map<String, Object> getComponentConfiguration() {
-        return null;
+  public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
+    if (reader != null) {
+      reader.close();
     }
-
-    @Override
-    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
-        try {
-            this.collector = collector;
-            this.pipelineOptions = this.serializedOptions.getPipelineOptions().as(JStormPipelineOptions.class);
-
-            createSourceReader(null);
-
-            this.serializer = new KryoSerializer<>(conf);
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to create unbounded reader.", e);
-        }
+    reader = this.source.createReader(this.pipelineOptions, checkpointMark);
+    hasNextRecord = this.reader.start();
+  }
+
+  @Override
+  public synchronized void nextTuple() {
+    if (!activated.get()) {
+      return;
     }
-
-    public void createSourceReader(UnboundedSource.CheckpointMark checkpointMark) throws IOException {
-        if (reader != null) {
-            reader.close();
+    try {
+      if (!hasNextRecord) {
+        hasNextRecord = reader.advance();
+      }
+
+      while (hasNextRecord && activated.get()) {
+        Object value = reader.getCurrent();
+        Instant timestamp = reader.getCurrentTimestamp();
+
+        WindowedValue wv =
+            WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
+        LOG.debug("Source output: " + wv.getValue());
+        if (keyedEmit(outputTag.getId())) {
+          KV kv = (KV) wv.getValue();
+          // Convert WindowedValue<KV> to <K, WindowedValue<V>>
+          byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
+          collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
+        } else {
+          byte[] immutableValue = serializer.serialize(wv);
+          collector.emit(outputTag.getId(), new Values(immutableValue));
         }
-        reader = this.source.createReader(this.pipelineOptions, checkpointMark);
-        hasNextRecord = this.reader.start();
-    }
 
-    @Override
-    public synchronized void nextTuple() {
-        if (!activated.get()) {
-            return;
-        }
-        try {
-            if (!hasNextRecord) {
-                hasNextRecord = reader.advance();
-            }
-
-            while (hasNextRecord && activated.get()) {
-                Object value = reader.getCurrent();
-                Instant timestamp = reader.getCurrentTimestamp();
-
-                WindowedValue wv = WindowedValue.of(value, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
-                LOG.debug("Source output: " + wv.getValue());
-                if (keyedEmit(outputTag.getId())) {
-                    KV kv = (KV) wv.getValue();
-                    // Convert WindowedValue<KV> to <K, WindowedValue<V>>
-                    byte[] immutableValue = serializer.serialize(wv.withValue(kv.getValue()));
-                    collector.emit(outputTag.getId(), new Values(kv.getKey(), immutableValue));
-                } else {
-                    byte[] immutableValue = serializer.serialize(wv);
-                    collector.emit(outputTag.getId(), new Values(immutableValue));
-                }
-
-                // move to next record
-                hasNextRecord = reader.advance();
-            }
-
-            Instant waterMark = reader.getWatermark();
-            if (waterMark != null && lastWaterMark <  waterMark.getMillis()) {
-                lastWaterMark = waterMark.getMillis();
-                collector.flush();
-                collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
-                LOG.debug("Source output: WM-{}", waterMark.toDateTime());
-            }
-        } catch (IOException e) {
-            throw new RuntimeException("Exception reading values from source.", e);
-        }
+        // move to next record
+        hasNextRecord = reader.advance();
+      }
+
+      Instant waterMark = reader.getWatermark();
+      if (waterMark != null && lastWaterMark < waterMark.getMillis()) {
+        lastWaterMark = waterMark.getMillis();
+        collector.flush();
+        collector.emit(CommonInstance.BEAM_WATERMARK_STREAM_ID, new Values(waterMark.getMillis()));
+        LOG.debug("Source output: WM-{}", waterMark.toDateTime());
+      }
+    } catch (IOException e) {
+      throw new RuntimeException("Exception reading values from source.", e);
     }
+  }
 
-    public UnboundedSource getUnboundedSource() {
-        return source;
-    }
+  public UnboundedSource getUnboundedSource() {
+    return source;
+  }
 
-    public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
-        return reader;
-    }
+  public UnboundedSource.UnboundedReader getUnboundedSourceReader() {
+    return reader;
+  }
 
-    @Override
-    public String toString() {
-        return description;
-    }
+  @Override
+  public String toString() {
+    return description;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
index 7b0e8db..4320967 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/ViewExecutor.java
@@ -26,30 +26,31 @@ import org.apache.beam.sdk.values.TupleTag;
  */
 public class ViewExecutor implements Executor {
 
-    private final String description;
-    private final TupleTag outputTag;
-    private ExecutorsBolt executorsBolt;
-
-    public ViewExecutor(String description, TupleTag outputTag) {
-        this.description = description;
-        this.outputTag = outputTag;
-    }
-
-    @Override
-    public void init(ExecutorContext context) {
-        this.executorsBolt = context.getExecutorsBolt();
-    }
-
-    @Override
-    public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
-        executorsBolt.processExecutorElem(outputTag, elem);
-    }
-
-    @Override
-    public void cleanup() {}
-
-    @Override
-    public String toString() {
-        return description;
-    }
+  private final String description;
+  private final TupleTag outputTag;
+  private ExecutorsBolt executorsBolt;
+
+  public ViewExecutor(String description, TupleTag outputTag) {
+    this.description = description;
+    this.outputTag = outputTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public <T> void process(TupleTag<T> tag, WindowedValue<T> elem) {
+    executorsBolt.processExecutorElem(outputTag, elem);
+  }
+
+  @Override
+  public void cleanup() {
+  }
+
+  @Override
+  public String toString() {
+    return description;
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
index a6c3c16..7f21d26 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/WindowAssignExecutor.java
@@ -17,7 +17,10 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime;
 
+import static com.google.common.base.Preconditions.checkArgument;
+
 import com.google.common.collect.Iterables;
+import java.util.Collection;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -26,82 +29,79 @@ import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Collection;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
 public class WindowAssignExecutor<T, W extends BoundedWindow> implements Executor {
-    private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
-
-    private final String description;
-    private WindowFn<T, W> windowFn;
-    private ExecutorsBolt executorsBolt;
-    private TupleTag outputTag;
-
-    class JStormAssignContext<InputT, W extends BoundedWindow>
-            extends WindowFn<InputT, W>.AssignContext {
-        private final WindowedValue<InputT> value;
-
-        JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
-            fn.super();
-            checkArgument(
-                    Iterables.size(value.getWindows()) == 1,
-                    String.format(
-                            "%s passed to window assignment must be in a single window, but it was in %s: %s",
-                            WindowedValue.class.getSimpleName(),
-                            Iterables.size(value.getWindows()),
-                            value.getWindows()));
-            this.value = value;
-        }
-
-        @Override
-        public InputT element() {
-            return value.getValue();
-        }
-
-        @Override
-        public Instant timestamp() {
-            return value.getTimestamp();
-        }
-
-        @Override
-        public BoundedWindow window() {
-            return Iterables.getOnlyElement(value.getWindows());
-        }
+  private static final Logger LOG = LoggerFactory.getLogger(WindowAssignExecutor.class);
+
+  private final String description;
+  private WindowFn<T, W> windowFn;
+  private ExecutorsBolt executorsBolt;
+  private TupleTag outputTag;
+
+  class JStormAssignContext<InputT, W extends BoundedWindow>
+      extends WindowFn<InputT, W>.AssignContext {
+    private final WindowedValue<InputT> value;
+
+    JStormAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) {
+      fn.super();
+      checkArgument(
+          Iterables.size(value.getWindows()) == 1,
+          String.format(
+              "%s passed to window assignment must be in a single window, but it was in %s: %s",
+              WindowedValue.class.getSimpleName(),
+              Iterables.size(value.getWindows()),
+              value.getWindows()));
+      this.value = value;
     }
 
-    public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
-        this.description = description;
-        this.windowFn = windowFn;
-        this.outputTag = outputTag;
+    @Override
+    public InputT element() {
+      return value.getValue();
     }
 
     @Override
-    public void init(ExecutorContext context) {
-        this.executorsBolt = context.getExecutorsBolt();
+    public Instant timestamp() {
+      return value.getTimestamp();
     }
 
     @Override
-    public void process(TupleTag tag, WindowedValue elem) {
-        Collection<W> windows = null;
-        try {
-            windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
-            for (W window: windows) {
-                executorsBolt.processExecutorElem(
-                        outputTag,
-                        WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
-            }
-        } catch (Exception e) {
-            LOG.warn("Failed to assign windows for elem=" + elem, e);
-        }
+    public BoundedWindow window() {
+      return Iterables.getOnlyElement(value.getWindows());
     }
+  }
+
+  public WindowAssignExecutor(String description, WindowFn<T, W> windowFn, TupleTag outputTag) {
+    this.description = description;
+    this.windowFn = windowFn;
+    this.outputTag = outputTag;
+  }
+
+  @Override
+  public void init(ExecutorContext context) {
+    this.executorsBolt = context.getExecutorsBolt();
+  }
+
+  @Override
+  public void process(TupleTag tag, WindowedValue elem) {
+    Collection<W> windows = null;
+    try {
+      windows = windowFn.assignWindows(new JStormAssignContext<>(windowFn, elem));
+      for (W window : windows) {
+        executorsBolt.processExecutorElem(
+            outputTag,
+            WindowedValue.of(elem.getValue(), elem.getTimestamp(), window, elem.getPane()));
+      }
+    } catch (Exception e) {
+      LOG.warn("Failed to assign windows for elem=" + elem, e);
+    }
+  }
 
-    @Override
-    public void cleanup() {}
+  @Override
+  public void cleanup() {
+  }
 
 
-    @Override
-    public String toString() {
-        return description;
-    }
+  @Override
+  public String toString() {
+    return description;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
index eaf0549..1466f35 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormBagState.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,162 +17,161 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime.state;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 import com.alibaba.jstorm.cache.ComposedKey;
 import com.alibaba.jstorm.cache.IKvStore;
 import com.alibaba.jstorm.cache.KvStoreIterable;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import javax.annotation.Nullable;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.sdk.state.BagState;
 import org.apache.beam.sdk.state.ReadableState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * JStorm implementation of {@link BagState}.
  */
 class JStormBagState<K, T> implements BagState<T> {
-    private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
-
-    @Nullable
-    private final K key;
-    private final StateNamespace namespace;
-    private final IKvStore<ComposedKey, T> kvState;
-    private final IKvStore<ComposedKey, Object> stateInfoKvState;
-    private int elemIndex;
-
-    public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
-                           IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
-        this.key = key;
-        this.namespace = checkNotNull(namespace, "namespace");
-        this.kvState = checkNotNull(kvState, "kvState");
-        this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
-
-        Integer index = (Integer) stateInfoKvState.get(getComposedKey());
-        this.elemIndex =  index != null ? ++index : 0;
+  private static final Logger LOG = LoggerFactory.getLogger(JStormBagState.class);
+
+  @Nullable
+  private final K key;
+  private final StateNamespace namespace;
+  private final IKvStore<ComposedKey, T> kvState;
+  private final IKvStore<ComposedKey, Object> stateInfoKvState;
+  private int elemIndex;
+
+  public JStormBagState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState,
+                        IKvStore<ComposedKey, Object> stateInfoKvState) throws IOException {
+    this.key = key;
+    this.namespace = checkNotNull(namespace, "namespace");
+    this.kvState = checkNotNull(kvState, "kvState");
+    this.stateInfoKvState = checkNotNull(stateInfoKvState, "stateInfoKvState");
+
+    Integer index = (Integer) stateInfoKvState.get(getComposedKey());
+    this.elemIndex = index != null ? ++index : 0;
+  }
+
+  @Override
+  public void add(T input) {
+    try {
+      kvState.put(getComposedKey(elemIndex), input);
+      stateInfoKvState.put(getComposedKey(), elemIndex);
+      elemIndex++;
+    } catch (IOException e) {
+      throw new RuntimeException(e.getCause());
     }
-
-    @Override
-    public void add(T input) {
-        try {
-            kvState.put(getComposedKey(elemIndex), input);
-            stateInfoKvState.put(getComposedKey(), elemIndex);
-            elemIndex++;
-        } catch (IOException e) {
-            throw new RuntimeException(e.getCause());
-        }
+  }
+
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return new ReadableState<Boolean>() {
+      @Override
+      public Boolean read() {
+        return elemIndex <= 0;
+      }
+
+      @Override
+      public ReadableState<Boolean> readLater() {
+        // TODO: support prefetch.
+        return this;
+      }
+    };
+  }
+
+  @Override
+  public Iterable<T> read() {
+    return new BagStateIterable(elemIndex);
+  }
+
+  @Override
+  public BagState readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
+
+  @Override
+  public void clear() {
+    try {
+      for (int i = 0; i < elemIndex; i++) {
+        kvState.remove(getComposedKey(i));
+      }
+      stateInfoKvState.remove(getComposedKey());
+      elemIndex = 0;
+    } catch (IOException e) {
+      throw new RuntimeException(e.getCause());
     }
+  }
 
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return new ReadableState<Boolean>() {
-            @Override
-            public Boolean read() {
-                return elemIndex <= 0;
-            }
-
-            @Override
-            public ReadableState<Boolean> readLater() {
-                // TODO: support prefetch.
-                return this;
-            }
-        };
-    }
+  private ComposedKey getComposedKey() {
+    return ComposedKey.of(key, namespace);
+  }
 
-    @Override
-    public Iterable<T> read() {
-        return new BagStateIterable(elemIndex);
-    }
+  private ComposedKey getComposedKey(int elemIndex) {
+    return ComposedKey.of(key, namespace, elemIndex);
+  }
 
-    @Override
-    public BagState readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
+  private class BagStateIterable implements KvStoreIterable<T> {
 
-    @Override
-    public void clear() {
+    private class BagStateIterator implements Iterator<T> {
+      private final int size;
+      private int cursor = 0;
+
+      BagStateIterator() {
+        Integer s = null;
         try {
-            for (int i = 0; i < elemIndex; i++) {
-                kvState.remove(getComposedKey(i));
-            }
-            stateInfoKvState.remove(getComposedKey());
-            elemIndex = 0;
+          s = (Integer) stateInfoKvState.get(getComposedKey());
         } catch (IOException e) {
-            throw new RuntimeException(e.getCause());
+          LOG.error("Failed to get elemIndex for key={}", getComposedKey());
+        }
+        this.size = s != null ? ++s : 0;
+      }
+
+      @Override
+      public boolean hasNext() {
+        return cursor < size;
+      }
+
+      @Override
+      public T next() {
+        if (cursor >= size) {
+          throw new NoSuchElementException();
         }
-    }
-
-    private ComposedKey getComposedKey() {
-        return ComposedKey.of(key, namespace);
-    }
-
-    private ComposedKey getComposedKey(int elemIndex) {
-        return ComposedKey.of(key, namespace, elemIndex);
-    }
 
-    private class BagStateIterable implements KvStoreIterable<T> {
-
-        private class BagStateIterator implements Iterator<T> {
-            private final int size;
-            private int cursor = 0;
-
-            BagStateIterator() {
-                Integer s = null;
-                try {
-                    s = (Integer) stateInfoKvState.get(getComposedKey());
-                } catch (IOException e) {
-                    LOG.error("Failed to get elemIndex for key={}", getComposedKey());
-                }
-                this.size = s != null ? ++s : 0;
-            }
-
-            @Override
-            public boolean hasNext() {
-                return cursor < size;
-            }
-
-            @Override
-            public T next() {
-                if (cursor >= size) {
-                    throw new NoSuchElementException();
-                }
-
-                T value = null;
-                try {
-                    value = kvState.get(getComposedKey(cursor));
-                } catch (IOException e) {
-                    LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
-                }
-                cursor++;
-                return value;
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException();
-            }
+        T value = null;
+        try {
+          value = kvState.get(getComposedKey(cursor));
+        } catch (IOException e) {
+          LOG.error("Failed to read composed key-[{}]", getComposedKey(cursor));
         }
+        cursor++;
+        return value;
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException();
+      }
+    }
 
-        private final int size;
+    private final int size;
 
-        BagStateIterable(int size) {
-            this.size = size;
-        }
+    BagStateIterable(int size) {
+      this.size = size;
+    }
 
-        @Override
-        public Iterator<T> iterator() {
-            return new BagStateIterator();
-        }
+    @Override
+    public Iterator<T> iterator() {
+      return new BagStateIterator();
+    }
 
-        @Override
-        public String toString() {
-            return String.format("BagStateIterable: composedKey=%s", getComposedKey());
-        }
+    @Override
+    public String toString() {
+      return String.format("BagStateIterable: composedKey=%s", getComposedKey());
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
index b0fe29b..7c6a239 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormCombiningState.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,7 +20,6 @@ package org.apache.beam.runners.jstorm.translation.runtime.state;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import javax.annotation.Nullable;
-
 import org.apache.beam.sdk.state.BagState;
 import org.apache.beam.sdk.state.CombiningState;
 import org.apache.beam.sdk.state.ReadableState;
@@ -30,59 +29,60 @@ import org.apache.beam.sdk.transforms.Combine;
  * JStorm implementation of {@link CombiningState}.
  */
 public class JStormCombiningState<InputT, AccumT, OutputT>
-        implements CombiningState<InputT, AccumT, OutputT> {
+    implements CombiningState<InputT, AccumT, OutputT> {
+
+  @Nullable
+  private final BagState<AccumT> accumBagState;
+  private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
 
-    @Nullable
-    private final BagState<AccumT> accumBagState;
-    private final Combine.CombineFn<InputT, AccumT, OutputT> combineFn;
-    JStormCombiningState(
-            BagState<AccumT> accumBagState,
-            Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-        this.accumBagState = checkNotNull(accumBagState, "accumBagState");
-        this.combineFn = checkNotNull(combineFn, "combineFn");
-    }
+  JStormCombiningState(
+      BagState<AccumT> accumBagState,
+      Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+    this.accumBagState = checkNotNull(accumBagState, "accumBagState");
+    this.combineFn = checkNotNull(combineFn, "combineFn");
+  }
 
-    @Override
-    public AccumT getAccum() {
-        // TODO: replacing the accumBagState with the merged accum.
-        return combineFn.mergeAccumulators(accumBagState.read());
-    }
+  @Override
+  public AccumT getAccum() {
+    // TODO: replacing the accumBagState with the merged accum.
+    return combineFn.mergeAccumulators(accumBagState.read());
+  }
 
-    @Override
-    public void addAccum(AccumT accumT) {
-        accumBagState.add(accumT);
-    }
+  @Override
+  public void addAccum(AccumT accumT) {
+    accumBagState.add(accumT);
+  }
 
-    @Override
-    public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
-        return combineFn.mergeAccumulators(iterable);
-    }
+  @Override
+  public AccumT mergeAccumulators(Iterable<AccumT> iterable) {
+    return combineFn.mergeAccumulators(iterable);
+  }
 
-    @Override
-    public void add(InputT input) {
-        accumBagState.add(
-                combineFn.addInput(combineFn.createAccumulator(), input));
-    }
+  @Override
+  public void add(InputT input) {
+    accumBagState.add(
+        combineFn.addInput(combineFn.createAccumulator(), input));
+  }
 
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return accumBagState.isEmpty();
-    }
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return accumBagState.isEmpty();
+  }
 
-    @Override
-    public OutputT read() {
-        return combineFn.extractOutput(
-            combineFn.mergeAccumulators(accumBagState.read()));
-    }
+  @Override
+  public OutputT read() {
+    return combineFn.extractOutput(
+        combineFn.mergeAccumulators(accumBagState.read()));
+  }
 
-    @Override
-    public CombiningState<InputT, AccumT, OutputT> readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
+  @Override
+  public CombiningState<InputT, AccumT, OutputT> readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
 
-    @Override
-    public void clear() {
-        accumBagState.clear();
-    }
+  @Override
+  public void clear() {
+    accumBagState.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
index f101beb..f1c1ed0 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormMapState.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,137 +18,136 @@
 package org.apache.beam.runners.jstorm.translation.runtime.state;
 
 import com.alibaba.jstorm.cache.IKvStore;
+import java.io.IOException;
+import java.util.Map;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.sdk.state.MapState;
 import org.apache.beam.sdk.state.ReadableState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Map;
-
 public class JStormMapState<K, V> implements MapState<K, V> {
-    private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
-
-    private final K key;
-    private final StateNamespace namespace;
-    private IKvStore<K, V> kvStore;
-
-    public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
-        this.key = key;
-        this.namespace = namespace;
-        this.kvStore = kvStore;
+  private static final Logger LOG = LoggerFactory.getLogger(JStormMapState.class);
+
+  private final K key;
+  private final StateNamespace namespace;
+  private IKvStore<K, V> kvStore;
+
+  public JStormMapState(K key, StateNamespace namespace, IKvStore<K, V> kvStore) {
+    this.key = key;
+    this.namespace = namespace;
+    this.kvStore = kvStore;
+  }
+
+  @Override
+  public void put(K var1, V var2) {
+    try {
+      kvStore.put(var1, var2);
+    } catch (IOException e) {
+      reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
     }
-
-    @Override
-    public void put(K var1, V var2) {
-        try {
-            kvStore.put(var1, var2);
-        } catch (IOException e) {
-            reportError(String.format("Failed to put key=%s, value=%s", var1, var2), e);
-        }
+  }
+
+  @Override
+  public ReadableState<V> putIfAbsent(K var1, V var2) {
+    ReadableState<V> ret = null;
+    try {
+      V value = kvStore.get(var1);
+      if (value == null) {
+        kvStore.put(var1, var2);
+        ret = new MapReadableState<>(null);
+      } else {
+        ret = new MapReadableState<>(value);
+      }
+    } catch (IOException e) {
+      reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
     }
-
-    @Override
-    public ReadableState<V> putIfAbsent(K var1, V var2) {
-        ReadableState<V> ret = null;
-        try {
-            V value = kvStore.get(var1);
-            if (value == null) {
-                kvStore.put(var1, var2);
-                ret = new MapReadableState<>(null);
-            } else {
-                ret = new MapReadableState<>(value);
-            }
-        } catch (IOException e) {
-            reportError(String.format("Failed to putIfAbsent key=%s, value=%s", var1, var2), e);
-        }
-        return ret;
+    return ret;
+  }
+
+  @Override
+  public void remove(K var1) {
+    try {
+      kvStore.remove(var1);
+    } catch (IOException e) {
+      reportError(String.format("Failed to remove key=%s", var1), e);
     }
-
-    @Override
-    public void remove(K var1) {
-        try {
-            kvStore.remove(var1);
-        } catch (IOException e) {
-            reportError(String.format("Failed to remove key=%s", var1), e);
-        }
+  }
+
+  @Override
+  public ReadableState<V> get(K var1) {
+    ReadableState<V> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState(kvStore.get(var1));
+    } catch (IOException e) {
+      reportError(String.format("Failed to get value for key=%s", var1), e);
     }
-
-    @Override
-    public ReadableState<V> get(K var1) {
-        ReadableState<V> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState(kvStore.get(var1));
-        } catch (IOException e) {
-            reportError(String.format("Failed to get value for key=%s", var1), e);
-        }
-        return ret;
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<K>> keys() {
+    ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.keys());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get keys"), e);
     }
-
-    @Override
-    public ReadableState<Iterable<K>> keys() {
-        ReadableState<Iterable<K>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.keys());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get keys"), e);
-        }
-        return ret;
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<V>> values() {
+    ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.values());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get values"), e);
     }
-
-    @Override
-    public ReadableState<Iterable<V>> values() {
-        ReadableState<Iterable<V>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.values());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get values"), e);
-        }
-        return ret;
-    }
-
-    @Override
-    public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
-        ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
-        try {
-            ret = new MapReadableState<>(kvStore.entries());
-        } catch (IOException e) {
-            reportError(String.format("Failed to get values"), e);
-        }
-        return ret;
+    return ret;
+  }
+
+  @Override
+  public ReadableState<Iterable<Map.Entry<K, V>>> entries() {
+    ReadableState<Iterable<Map.Entry<K, V>>> ret = new MapReadableState<>(null);
+    try {
+      ret = new MapReadableState<>(kvStore.entries());
+    } catch (IOException e) {
+      reportError(String.format("Failed to get values"), e);
     }
-
-    @Override
-    public void clear() {
-        try {
-            Iterable<K> keys = kvStore.keys();
-            kvStore.removeBatch(keys);
-        } catch (IOException e) {
-            reportError(String.format("Failed to clear map state"), e);
-        }
+    return ret;
+  }
+
+  @Override
+  public void clear() {
+    try {
+      Iterable<K> keys = kvStore.keys();
+      kvStore.removeBatch(keys);
+    } catch (IOException e) {
+      reportError(String.format("Failed to clear map state"), e);
     }
+  }
 
-    private void reportError(String errorInfo, IOException e) {
-        LOG.error(errorInfo, e);
-        throw new RuntimeException(errorInfo);
-    }
+  private void reportError(String errorInfo, IOException e) {
+    LOG.error(errorInfo, e);
+    throw new RuntimeException(errorInfo);
+  }
 
-    private class MapReadableState<T> implements ReadableState<T> {
-        private T value;
+  private class MapReadableState<T> implements ReadableState<T> {
+    private T value;
 
-        public MapReadableState(T value) {
-            this.value = value;
-        }
+    public MapReadableState(T value) {
+      this.value = value;
+    }
 
-        @Override
-        public T read() {
-            return value;
-        }
+    @Override
+    public T read() {
+      return value;
+    }
 
-        @Override
-        public ReadableState<T> readLater() {
-            return this;
-        }
+    @Override
+    public ReadableState<T> readLater() {
+      return this;
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
index 8a0cb73..80ef3a2 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormStateInternals.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,13 +17,16 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime.state;
 
-import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
+import static com.google.common.base.Preconditions.checkNotNull;
+
 import com.alibaba.jstorm.cache.ComposedKey;
 import com.alibaba.jstorm.cache.IKvStoreManager;
-
+import java.io.IOException;
+import javax.annotation.Nullable;
 import org.apache.beam.runners.core.StateInternals;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.runners.core.StateTag;
+import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.state.BagState;
 import org.apache.beam.sdk.state.CombiningState;
@@ -41,151 +44,148 @@ import org.apache.beam.sdk.transforms.CombineWithContext;
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
 import org.joda.time.Instant;
 
-import javax.annotation.Nullable;
-import java.io.IOException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * JStorm implementation of {@link StateInternals}.
  */
 public class JStormStateInternals<K> implements StateInternals {
 
-    private static final String STATE_INFO = "state-info:";
-
-    @Nullable
-    private final K key;
-    private final IKvStoreManager kvStoreManager;
-    private final TimerService timerService;
-    private final int executorId;
-
-    public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
-                                TimerService timerService, int executorId) {
-        this.key = key;
-        this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
-        this.timerService = checkNotNull(timerService, "timerService");
-        this.executorId = executorId;
-    }
-
-    @Nullable
-    @Override
-    public K getKey() {
-        return key;
-    }
-
-    @Override
-    public <T extends State> T state(
-        StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
-        // throw new UnsupportedOperationException("StateContext is not supported.");
-        /**
-         * TODO:
-         * Same implementation as state() which is without StateContext. This might be updated after
-         * we figure out if we really need StateContext for JStorm state internals.
-         */
-        return state(namespace, address);
-    }
-
-    @Override
-    public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
-        return address.getSpec().bind(address.getId(), new StateBinder() {
-            @Override
-            public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
-                try {
-                    return new JStormValueState<>(
-                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-
-            @Override
-            public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
-                try {
-                    return new JStormBagState(
-                            getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException();
+  private static final String STATE_INFO = "state-info:";
+
+  @Nullable
+  private final K key;
+  private final IKvStoreManager kvStoreManager;
+  private final TimerService timerService;
+  private final int executorId;
+
+  public JStormStateInternals(K key, IKvStoreManager kvStoreManager,
+                              TimerService timerService, int executorId) {
+    this.key = key;
+    this.kvStoreManager = checkNotNull(kvStoreManager, "kvStoreManager");
+    this.timerService = checkNotNull(timerService, "timerService");
+    this.executorId = executorId;
+  }
+
+  @Nullable
+  @Override
+  public K getKey() {
+    return key;
+  }
+
+  @Override
+  public <T extends State> T state(
+      StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
+    // throw new UnsupportedOperationException("StateContext is not supported.");
+    /**
+     * TODO:
+     * Same implementation as state() which is without StateContext. This might be updated after
+     * we figure out if we really need StateContext for JStorm state internals.
+     */
+    return state(namespace, address);
+  }
+
+  @Override
+  public <T extends State> T state(final StateNamespace namespace, StateTag<T> address) {
+    return address.getSpec().bind(address.getId(), new StateBinder() {
+      @Override
+      public <T> ValueState<T> bindValue(String id, StateSpec<ValueState<T>> spec, Coder<T> coder) {
+        try {
+          return new JStormValueState<>(
+              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+      @Override
+      public <T> BagState<T> bindBag(String id, StateSpec<BagState<T>> spec, Coder<T> elemCoder) {
+        try {
+          return new JStormBagState(
+              getKey(), namespace, kvStoreManager.<ComposedKey, T>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+      @Override
+      public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
+          String id,
+          StateSpec<MapState<KeyT, ValueT>> spec,
+          Coder<KeyT> mapKeyCoder,
+          Coder<ValueT> mapValueCoder) {
+        try {
+          return new JStormMapState<>(
+              getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+
+      @Override
+      public <InputT, AccumT, OutputT> CombiningState bindCombining(
+          String id,
+          StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
+          Coder<AccumT> accumCoder,
+          Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
+        try {
+          BagState<AccumT> accumBagState = new JStormBagState(
+              getKey(), namespace,
+              kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+          return new JStormCombiningState<>(accumBagState, combineFn);
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+
+
+      @Override
+      public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
+      bindCombiningWithContext(
+          String id,
+          StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
+          CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public WatermarkHoldState bindWatermark(
+          String id,
+          StateSpec<WatermarkHoldState> spec,
+          final TimestampCombiner timestampCombiner) {
+        try {
+          BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
+              getKey(), namespace,
+              kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
+              kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
+
+          Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
+              new BinaryCombineFn<Instant>() {
+                @Override
+                public Instant apply(Instant left, Instant right) {
+                  return timestampCombiner.combine(left, right);
                 }
-            }
-
-            @Override
-            public <T> SetState<T> bindSet(String id, StateSpec<SetState<T>> spec, Coder<T> elemCoder) {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public <KeyT, ValueT> MapState<KeyT, ValueT> bindMap(
-                String id,
-                StateSpec<MapState<KeyT, ValueT>> spec,
-                Coder<KeyT> mapKeyCoder,
-                Coder<ValueT> mapValueCoder) {
-                try {
-                    return new JStormMapState<>(getKey(), namespace, kvStoreManager.<KeyT, ValueT>getOrCreate(getStoreId(id)));
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-
-            @Override
-            public <InputT, AccumT, OutputT> CombiningState bindCombining(
-                    String id,
-                    StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
-                    Coder<AccumT> accumCoder,
-                    Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
-                try {
-                    BagState<AccumT> accumBagState = new JStormBagState(
-                            getKey(), namespace,
-                            kvStoreManager.<ComposedKey, AccumT>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-                    return new JStormCombiningState<>(accumBagState, combineFn);
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-
-
-            @Override
-            public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT>
-            bindCombiningWithContext(
-                String id,
-                StateSpec<CombiningState<InputT, AccumT, OutputT>> stateSpec, Coder<AccumT> coder,
-                CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext) {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public WatermarkHoldState bindWatermark(
-                String id,
-                StateSpec<WatermarkHoldState> spec,
-                final TimestampCombiner timestampCombiner) {
-                try {
-                    BagState<Combine.Holder<Instant>> accumBagState = new JStormBagState(
-                            getKey(), namespace,
-                            kvStoreManager.<ComposedKey, Combine.Holder<Instant>>getOrCreate(getStoreId(id)),
-                            kvStoreManager.<ComposedKey, Object>getOrCreate(STATE_INFO + getStoreId(id)));
-
-                    Combine.CombineFn<Instant, Combine.Holder<Instant>, Instant> outputTimeCombineFn =
-                            new BinaryCombineFn<Instant>() {
-                                @Override
-                                public Instant apply(Instant left, Instant right) {
-                                  return timestampCombiner.combine(left, right);
-                                }};
-                    return new JStormWatermarkHoldState(
-                            namespace,
-                            new JStormCombiningState<>(
-                                    accumBagState,
-                                    outputTimeCombineFn),
-                            timestampCombiner,
-                            timerService);
-                } catch (IOException e) {
-                    throw new RuntimeException();
-                }
-            }
-        });
-    }
-
-    private String getStoreId(String stateId) {
-        return String.format("%s-%s", stateId, executorId);
-    }
+              };
+          return new JStormWatermarkHoldState(
+              namespace,
+              new JStormCombiningState<>(
+                  accumBagState,
+                  outputTimeCombineFn),
+              timestampCombiner,
+              timerService);
+        } catch (IOException e) {
+          throw new RuntimeException();
+        }
+      }
+    });
+  }
+
+  private String getStoreId(String stateId) {
+    return String.format("%s-%s", stateId, executorId);
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
index 5ad3663..79ff6b4 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormValueState.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -19,66 +19,64 @@ package org.apache.beam.runners.jstorm.translation.runtime.state;
 
 import com.alibaba.jstorm.cache.ComposedKey;
 import com.alibaba.jstorm.cache.IKvStore;
-
+import java.io.IOException;
+import javax.annotation.Nullable;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.sdk.state.ValueState;
 
-import javax.annotation.Nullable;
-import java.io.IOException;
-
 /**
  * JStorm implementation of {@link ValueState}.
  */
 public class JStormValueState<K, T> implements ValueState<T> {
 
-    @Nullable
-    private final K key;
-    private final StateNamespace namespace;
-    private final IKvStore<ComposedKey, T> kvState;
+  @Nullable
+  private final K key;
+  private final StateNamespace namespace;
+  private final IKvStore<ComposedKey, T> kvState;
 
-    JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
-        this.key = key;
-        this.namespace = namespace;
-        this.kvState = kvState;
-    }
+  JStormValueState(@Nullable K key, StateNamespace namespace, IKvStore<ComposedKey, T> kvState) {
+    this.key = key;
+    this.namespace = namespace;
+    this.kvState = kvState;
+  }
 
-    @Override
-    public void write(T t) {
-        try {
-            kvState.put(getComposedKey(), t);
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
-        }
+  @Override
+  public void write(T t) {
+    try {
+      kvState.put(getComposedKey(), t);
+    } catch (IOException e) {
+      throw new RuntimeException(String.format(
+          "Failed to write key: %s, namespace: %s, value: %s.", key, namespace, t));
     }
+  }
 
-    @Override
-    public T read() {
-        try {
-            return kvState.get(getComposedKey());
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to read key: %s, namespace: %s.", key, namespace));
-        }
+  @Override
+  public T read() {
+    try {
+      return kvState.get(getComposedKey());
+    } catch (IOException e) {
+      throw new RuntimeException(String.format(
+          "Failed to read key: %s, namespace: %s.", key, namespace));
     }
+  }
 
-    @Override
-    public ValueState<T> readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
+  @Override
+  public ValueState<T> readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
 
-    @Override
-    public void clear() {
-        try {
-            kvState.remove(getComposedKey());
-        } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                    "Failed to clear key: %s, namespace: %s.", key, namespace));
-        }
+  @Override
+  public void clear() {
+    try {
+      kvState.remove(getComposedKey());
+    } catch (IOException e) {
+      throw new RuntimeException(String.format(
+          "Failed to clear key: %s, namespace: %s.", key, namespace));
     }
+  }
 
-    private ComposedKey getComposedKey() {
-        return ComposedKey.of(key, namespace);
-    }
+  private ComposedKey getComposedKey() {
+    return ComposedKey.of(key, namespace);
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
index 659d77c..dc3ba43 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/state/JStormWatermarkHoldState.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -19,8 +19,8 @@ package org.apache.beam.runners.jstorm.translation.runtime.state;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
 import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
 import org.apache.beam.sdk.state.GroupingState;
 import org.apache.beam.sdk.state.ReadableState;
 import org.apache.beam.sdk.state.WatermarkHoldState;
@@ -32,52 +32,52 @@ import org.joda.time.Instant;
  */
 public class JStormWatermarkHoldState implements WatermarkHoldState {
 
-    private final StateNamespace namespace;
-    private final GroupingState<Instant, Instant> watermarkHoldsState;
-    private final TimestampCombiner timestampCombiner;
-    private final TimerService timerService;
+  private final StateNamespace namespace;
+  private final GroupingState<Instant, Instant> watermarkHoldsState;
+  private final TimestampCombiner timestampCombiner;
+  private final TimerService timerService;
 
-    JStormWatermarkHoldState(
-            StateNamespace namespace,
-            GroupingState<Instant, Instant> watermarkHoldsState,
-            TimestampCombiner timestampCombiner,
-            TimerService timerService) {
-        this.namespace = checkNotNull(namespace, "namespace");
-        this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
-        this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
-        this.timerService = checkNotNull(timerService, "timerService");
-    }
+  JStormWatermarkHoldState(
+      StateNamespace namespace,
+      GroupingState<Instant, Instant> watermarkHoldsState,
+      TimestampCombiner timestampCombiner,
+      TimerService timerService) {
+    this.namespace = checkNotNull(namespace, "namespace");
+    this.watermarkHoldsState = checkNotNull(watermarkHoldsState, "watermarkHoldsState");
+    this.timestampCombiner = checkNotNull(timestampCombiner, "timestampCombiner");
+    this.timerService = checkNotNull(timerService, "timerService");
+  }
 
-    @Override
-    public TimestampCombiner getTimestampCombiner() {
-        return timestampCombiner;
-    }
+  @Override
+  public TimestampCombiner getTimestampCombiner() {
+    return timestampCombiner;
+  }
 
-    @Override
-    public void add(Instant instant) {
-        timerService.addWatermarkHold(namespace.stringKey(), instant);
-        watermarkHoldsState.add(instant);
-    }
+  @Override
+  public void add(Instant instant) {
+    timerService.addWatermarkHold(namespace.stringKey(), instant);
+    watermarkHoldsState.add(instant);
+  }
 
-    @Override
-    public ReadableState<Boolean> isEmpty() {
-        return watermarkHoldsState.isEmpty();
-    }
+  @Override
+  public ReadableState<Boolean> isEmpty() {
+    return watermarkHoldsState.isEmpty();
+  }
 
-    @Override
-    public Instant read() {
-        return watermarkHoldsState.read();
-    }
+  @Override
+  public Instant read() {
+    return watermarkHoldsState.read();
+  }
 
-    @Override
-    public WatermarkHoldState readLater() {
-        // TODO: support prefetch.
-        return this;
-    }
+  @Override
+  public WatermarkHoldState readLater() {
+    // TODO: support prefetch.
+    return this;
+  }
 
-    @Override
-    public void clear() {
-        timerService.clearWatermarkHold(namespace.stringKey());
-        watermarkHoldsState.clear();
-    }
+  @Override
+  public void clear() {
+    timerService.clearWatermarkHold(namespace.stringKey());
+    watermarkHoldsState.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
index 4b5f83c..184a957 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/runtime/timer/JStormTimerInternals.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,83 +17,84 @@
  */
 package org.apache.beam.runners.jstorm.translation.runtime.timer;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import javax.annotation.Nullable;
+import org.apache.beam.runners.core.StateNamespace;
+import org.apache.beam.runners.core.TimerInternals;
 import org.apache.beam.runners.jstorm.translation.runtime.DoFnExecutor;
 import org.apache.beam.runners.jstorm.translation.runtime.TimerService;
-import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.sdk.state.TimeDomain;
 import org.joda.time.Instant;
 
-import javax.annotation.Nullable;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * JStorm implementation of {@link TimerInternals}.
  */
 public class JStormTimerInternals<K> implements TimerInternals {
 
-    private final K key;
-    private final DoFnExecutor<?, ?> doFnExecutor;
-    private final TimerService timerService;
-
-
-    public JStormTimerInternals(@Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
-        this.key = key;
-        this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
-        this.timerService = checkNotNull(timerService, "timerService");
-    }
-
-    @Override
-    public void setTimer(StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
-        setTimer(TimerData.of(timerId, namespace, target, timeDomain));
-    }
-
-    @Override
-    @Deprecated
-    public void setTimer(TimerData timerData) {
-        timerService.setTimer(key, timerData, doFnExecutor);
-    }
-
-    @Override
-    public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    @Deprecated
-    public void deleteTimer(StateNamespace namespace, String timerId) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    @Deprecated
-    public void deleteTimer(TimerData timerData) {
-        throw new UnsupportedOperationException(
-                "Canceling of a timer is not yet supported.");
-    }
-
-    @Override
-    public Instant currentProcessingTime() {
-        return Instant.now();
-    }
-
-    @Override
-    @Nullable
-    public Instant currentSynchronizedProcessingTime() {
-        return null;
-    }
-
-    @Override
-    public Instant currentInputWatermarkTime() {
-        return new Instant(timerService.currentInputWatermark());
-    }
-
-    @Override
-    @Nullable
-    public Instant currentOutputWatermarkTime() {
-        return new Instant(timerService.currentOutputWatermark());
-    }
+  private final K key;
+  private final DoFnExecutor<?, ?> doFnExecutor;
+  private final TimerService timerService;
+
+
+  public JStormTimerInternals(
+      @Nullable K key, DoFnExecutor<?, ?> doFnExecutor, TimerService timerService) {
+    this.key = key;
+    this.doFnExecutor = checkNotNull(doFnExecutor, "doFnExecutor");
+    this.timerService = checkNotNull(timerService, "timerService");
+  }
+
+  @Override
+  public void setTimer(
+      StateNamespace namespace, String timerId, Instant target, TimeDomain timeDomain) {
+    setTimer(TimerData.of(timerId, namespace, target, timeDomain));
+  }
+
+  @Override
+  @Deprecated
+  public void setTimer(TimerData timerData) {
+    timerService.setTimer(key, timerData, doFnExecutor);
+  }
+
+  @Override
+  public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
+    throw new UnsupportedOperationException(
+        "Canceling of a timer is not yet supported.");
+  }
+
+  @Override
+  @Deprecated
+  public void deleteTimer(StateNamespace namespace, String timerId) {
+    throw new UnsupportedOperationException(
+        "Canceling of a timer is not yet supported.");
+  }
+
+  @Override
+  @Deprecated
+  public void deleteTimer(TimerData timerData) {
+    throw new UnsupportedOperationException(
+        "Canceling of a timer is not yet supported.");
+  }
+
+  @Override
+  public Instant currentProcessingTime() {
+    return Instant.now();
+  }
+
+  @Override
+  @Nullable
+  public Instant currentSynchronizedProcessingTime() {
+    return null;
+  }
+
+  @Override
+  public Instant currentInputWatermarkTime() {
+    return new Instant(timerService.currentInputWatermark());
+  }
+
+  @Override
+  @Nullable
+  public Instant currentOutputWatermarkTime() {
+    return new Instant(timerService.currentOutputWatermark());
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
index 9651fc2..7e7a54a 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/BoundedSourceTranslator.java
@@ -17,10 +17,9 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
+import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.UnboundedSourceSpout;
-
-import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TaggedPValue;
@@ -33,18 +32,20 @@ import org.apache.beam.sdk.values.TupleTag;
  */
 public class BoundedSourceTranslator<T> extends TransformTranslator.Default<Read.Bounded<T>> {
 
-    @Override
-    public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+  @Override
+  public void translateNode(Read.Bounded<T> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
 
-        TupleTag<?> outputTag = userGraphContext.getOutputTag();
-        PValue outputValue = userGraphContext.getOutput();
-        UnboundedSourceSpout spout = new UnboundedSourceSpout(
-                description,
-                new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
-                userGraphContext.getOptions(), outputTag);
+    TupleTag<?> outputTag = userGraphContext.getOutputTag();
+    PValue outputValue = userGraphContext.getOutput();
+    UnboundedSourceSpout spout = new UnboundedSourceSpout(
+        description,
+        new UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter(transform.getSource()),
+        userGraphContext.getOptions(), outputTag);
 
-        context.getExecutionGraphContext().registerSpout(spout, TaggedPValue.of(outputTag, outputValue));
-    }
+    context.getExecutionGraphContext().registerSpout(
+        spout, TaggedPValue.of(outputTag, outputValue));
+  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
index c4da58a..fe5fca9 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
@@ -19,6 +19,7 @@ package org.apache.beam.runners.jstorm.translation.translator;
 
 import org.apache.beam.sdk.transforms.Combine;
 
-public class CombineGloballyTranslator<InputT, OutputT> extends TransformTranslator.Default<Combine.Globally<InputT, OutputT>> {
-    
+public class CombineGloballyTranslator<InputT, OutputT>
+    extends TransformTranslator.Default<Combine.Globally<InputT, OutputT>> {
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
index 99cbff7..c382fb7 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
@@ -19,6 +19,7 @@ package org.apache.beam.runners.jstorm.translation.translator;
 
 import org.apache.beam.sdk.transforms.Combine;
 
-public class CombinePerKeyTranslator<K, InputT, OutputT> extends TransformTranslator.Default<Combine.PerKey<K, InputT, OutputT>> {
-    
+public class CombinePerKeyTranslator<K, InputT, OutputT>
+    extends TransformTranslator.Default<Combine.PerKey<K, InputT, OutputT>> {
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
index 4558216..bf8d472 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/FlattenTranslator.java
@@ -18,32 +18,30 @@
 package org.apache.beam.runners.jstorm.translation.translator;
 
 import com.google.common.collect.Maps;
-import org.apache.beam.sdk.transforms.Flatten;
-
+import java.util.Map;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
 import org.apache.beam.runners.jstorm.translation.runtime.FlattenExecutor;
+import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
 
-import java.util.Map;
-
 public class FlattenTranslator<V> extends TransformTranslator.Default<Flatten.PCollections<V>> {
 
-    @Override
-    public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+  @Override
+  public void translateNode(Flatten.PCollections<V> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
 
-        // Since a new tag is created in PCollectionList, retrieve the real tag here.
-        Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
-        for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
-            PCollection<V> pc = (PCollection<V>) entry.getValue();
-            inputs.putAll(pc.expand());
-        }
-        System.out.println("Real inputs: " + inputs);
-        System.out.println("FlattenList inputs: " + userGraphContext.getInputs());
-        String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
-        FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
-        context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+    // Since a new tag is created in PCollectionList, retrieve the real tag here.
+    Map<TupleTag<?>, PValue> inputs = Maps.newHashMap();
+    for (Map.Entry<TupleTag<?>, PValue> entry : userGraphContext.getInputs().entrySet()) {
+      PCollection<V> pc = (PCollection<V>) entry.getValue();
+      inputs.putAll(pc.expand());
     }
+    System.out.println("Real inputs: " + inputs);
+    System.out.println("FlattenList inputs: " + userGraphContext.getInputs());
+    String description = describeTransform(transform, inputs, userGraphContext.getOutputs());
+    FlattenExecutor executor = new FlattenExecutor(description, userGraphContext.getOutputTag());
+    context.addTransformExecutor(executor, inputs, userGraphContext.getOutputs());
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/aa251a4a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
index 6b8297b..85f96ce 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTranslator.java
@@ -17,53 +17,52 @@
  */
 package org.apache.beam.runners.jstorm.translation.translator;
 
-import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
 import com.google.common.collect.Lists;
-import org.apache.beam.sdk.transforms.GroupByKey;
-
+import java.util.Collections;
+import java.util.List;
 import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.sdk.values.WindowingStrategy;
+import org.apache.beam.runners.jstorm.translation.runtime.GroupByWindowExecutor;
+import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-
-import java.util.Collections;
-import java.util.List;
+import org.apache.beam.sdk.values.WindowingStrategy;
 
 public class GroupByKeyTranslator<K, V> extends TransformTranslator.Default<GroupByKey<K, V>> {
-    // information of transform
-    protected PCollection<KV<K, V>> input;
-    protected PCollection<KV<K, Iterable<V>>> output;
-    protected List<TupleTag<?>> inputTags;
-    protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
-    protected List<TupleTag<?>> sideOutputTags;
-    protected List<PCollectionView<?>> sideInputs;
-    protected WindowingStrategy<?, ?> windowingStrategy;
+  // information of transform
+  protected PCollection<KV<K, V>> input;
+  protected PCollection<KV<K, Iterable<V>>> output;
+  protected List<TupleTag<?>> inputTags;
+  protected TupleTag<KV<K, Iterable<V>>> mainOutputTag;
+  protected List<TupleTag<?>> sideOutputTags;
+  protected List<PCollectionView<?>> sideInputs;
+  protected WindowingStrategy<?, ?> windowingStrategy;
 
-    @Override
-    public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
-        TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
-        String description = describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
+  @Override
+  public void translateNode(GroupByKey<K, V> transform, TranslationContext context) {
+    TranslationContext.UserGraphContext userGraphContext = context.getUserGraphContext();
+    String description =
+        describeTransform(transform, userGraphContext.getInputs(), userGraphContext.getOutputs());
 
-        input = (PCollection<KV<K, V>>) userGraphContext.getInput();
-        output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
+    input = (PCollection<KV<K, V>>) userGraphContext.getInput();
+    output = (PCollection<KV<K, Iterable<V>>>) userGraphContext.getOutput();
 
-        inputTags = userGraphContext.getInputTags();
-        mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
-        sideOutputTags = Lists.newArrayList();
+    inputTags = userGraphContext.getInputTags();
+    mainOutputTag = (TupleTag<KV<K, Iterable<V>>>) userGraphContext.getOutputTag();
+    sideOutputTags = Lists.newArrayList();
 
-        sideInputs = Collections.<PCollectionView<?>>emptyList();
-        windowingStrategy = input.getWindowingStrategy();
+    sideInputs = Collections.<PCollectionView<?>>emptyList();
+    windowingStrategy = input.getWindowingStrategy();
 
-        GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
-                userGraphContext.getStepName(),
-                description,
-                context,
-                context.getUserGraphContext().getOptions(),
-                windowingStrategy,
-                mainOutputTag,
-                sideOutputTags);
-        context.addTransformExecutor(groupByWindowExecutor);
-    }
+    GroupByWindowExecutor<K, V> groupByWindowExecutor = new GroupByWindowExecutor<>(
+        userGraphContext.getStepName(),
+        description,
+        context,
+        context.getUserGraphContext().getOptions(),
+        windowingStrategy,
+        mainOutputTag,
+        sideOutputTags);
+    context.addTransformExecutor(groupByWindowExecutor);
+  }
 }


[17/53] [abbrv] beam git commit: jstorm-runner: remove ValidatesRunner tests and dead code from jstorm module.

Posted by pe...@apache.org.
jstorm-runner: remove ValidatesRunner tests and dead code from jstorm module.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/4ff42cbc
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/4ff42cbc
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/4ff42cbc

Branch: refs/heads/jstorm-runner
Commit: 4ff42cbc65452ae6259d90f07f2f80423eeb69df
Parents: aa251a4
Author: Pei He <pe...@apache.org>
Authored: Thu Jul 13 18:38:49 2017 +0800
Committer: Pei He <pe...@apache.org>
Committed: Sat Aug 19 12:02:56 2017 +0800

----------------------------------------------------------------------
 .../jstorm/translation/TranslatorRegistry.java  |  18 -
 .../translator/CombineGloballyTranslator.java   |  25 -
 .../translator/CombinePerKeyTranslator.java     |  25 -
 .../translator/ReshuffleTranslator.java         |  24 -
 .../translator/WindowBoundTranslator.java       |  47 --
 .../util/DefaultSideInputReader.java            |  45 --
 .../translator/CoGroupByKeyTest.java            | 301 ---------
 .../translation/translator/GroupByKeyTest.java  | 155 -----
 .../translation/translator/ParDoTest.java       | 624 -------------------
 9 files changed, 1264 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
index bce5b3e..316186e 100644
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
+++ b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/TranslatorRegistry.java
@@ -49,30 +49,12 @@ public class TranslatorRegistry {
   static {
     TRANSLATORS.put(Read.Bounded.class, new BoundedSourceTranslator());
     TRANSLATORS.put(Read.Unbounded.class, new UnboundedSourceTranslator());
-    // TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator());
-    // TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
-
     TRANSLATORS.put(ParDo.SingleOutput.class, new ParDoBoundTranslator());
     TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoBoundMultiTranslator());
-
-    //TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator<>());
     TRANSLATORS.put(Window.Assign.class, new WindowAssignTranslator<>());
-
     TRANSLATORS.put(Flatten.PCollections.class, new FlattenTranslator());
-
     TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator());
-
     TRANSLATORS.put(ViewTranslator.CreateJStormPCollectionView.class, new ViewTranslator());
-
-    /**
-     * Currently, empty translation is required for combine and reshuffle.
-     * Because, the transforms will be mapped to GroupByKey and Pardo finally.
-     * So we only need to translator the finally transforms.
-     * If any improvement is required, the composite transforms will be translated in the future.
-     */
-    // TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator());
-    // TRANSLATORS.put(Globally.class, new CombineGloballyTranslator());
-    // TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslator());
   }
 
   public static TransformTranslator<?> getTranslator(PTransform<?, ?> transform) {

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
deleted file mode 100644
index fe5fca9..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombineGloballyTranslator.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.sdk.transforms.Combine;
-
-public class CombineGloballyTranslator<InputT, OutputT>
-    extends TransformTranslator.Default<Combine.Globally<InputT, OutputT>> {
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
deleted file mode 100644
index c382fb7..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/CombinePerKeyTranslator.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.sdk.transforms.Combine;
-
-public class CombinePerKeyTranslator<K, InputT, OutputT>
-    extends TransformTranslator.Default<Combine.PerKey<K, InputT, OutputT>> {
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
deleted file mode 100644
index c450a22..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/ReshuffleTranslator.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.sdk.transforms.Reshuffle;
-
-public class ReshuffleTranslator<K, V> extends TransformTranslator.Default<Reshuffle<K, V>> {
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
deleted file mode 100644
index c863c9e..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/translator/WindowBoundTranslator.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.translation.TranslationContext;
-import org.apache.beam.sdk.transforms.windowing.FixedWindows;
-import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Translates a Window.Bound node into a Storm WindowedBolt
- *
- * @param <T>
- */
-public class WindowBoundTranslator<T> extends TransformTranslator.Default<Window.Assign<T>> {
-  private static final Logger LOG = LoggerFactory.getLogger(WindowBoundTranslator.class);
-
-  // Do nothing here currently. The assign of window strategy is included in AssignTranslator.
-  @Override
-  public void translateNode(Window.Assign<T> transform, TranslationContext context) {
-    if (transform.getWindowFn() instanceof FixedWindows) {
-      context.getUserGraphContext().setWindowed();
-    } else if (transform.getWindowFn() instanceof SlidingWindows) {
-      context.getUserGraphContext().setWindowed();
-    } else {
-      throw new UnsupportedOperationException(
-          "Not supported window type currently: " + transform.getWindowFn());
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java b/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
deleted file mode 100644
index 750095e..0000000
--- a/runners/jstorm/src/main/java/org/apache/beam/runners/jstorm/translation/util/DefaultSideInputReader.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.util;
-
-import java.io.Serializable;
-import javax.annotation.Nullable;
-import org.apache.beam.runners.core.SideInputReader;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.values.PCollectionView;
-
-/**
- * No-op SideInputReader implementation.
- */
-public class DefaultSideInputReader implements SideInputReader, Serializable {
-  @Nullable
-  @Override
-  public <T> T get(PCollectionView<T> pCollectionView, BoundedWindow boundedWindow) {
-    return null;
-  }
-
-  @Override
-  public <T> boolean contains(PCollectionView<T> pCollectionView) {
-    return false;
-  }
-
-  @Override
-  public boolean isEmpty() {
-    return true;
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
deleted file mode 100644
index 809436e..0000000
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/CoGroupByKeyTest.java
+++ /dev/null
@@ -1,301 +0,0 @@
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.TestJStormRunner;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.ValidatesRunner;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.apache.beam.sdk.transforms.join.CoGbkResult;
-import org.apache.beam.sdk.transforms.join.CoGroupByKey;
-import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
-import org.apache.beam.sdk.transforms.windowing.FixedWindows;
-import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Duration;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-@RunWith(JUnit4.class)
-public class CoGroupByKeyTest implements Serializable {
-    /**
-     * Converts the given list into a PCollection belonging to the provided
-     * Pipeline in such a way that coder inference needs to be performed.
-     */
-    private PCollection<KV<Integer, String>> createInput(String name,
-                                                         Pipeline p, List<KV<Integer, String>> list) {
-        return createInput(name, p, list,  new ArrayList<Long>());
-    }
-
-    /**
-     * Converts the given list with timestamps into a PCollection.
-     */
-    private PCollection<KV<Integer, String>> createInput(String name,
-                                                         Pipeline p, List<KV<Integer, String>> list, List<Long> timestamps) {
-        PCollection<KV<Integer, String>> input;
-        if (timestamps.isEmpty()) {
-            input = p.apply("Create" + name, Create.of(list)
-                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
-        } else {
-            input = p.apply("Create" + name, Create.timestamped(list, timestamps)
-                    .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
-        }
-        return input.apply(
-                "Identity" + name,
-                ParDo.of(
-                        new DoFn<KV<Integer, String>, KV<Integer, String>>() {
-                            @ProcessElement
-                            public void processElement(ProcessContext c) {
-                                c.output(c.element());
-                            }
-                        }));
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the result
-     * of a {@link CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
-     * where each {@link PCollection} has no duplicate keys and the key sets of
-     * each {@link PCollection} are intersecting but neither is a subset of the other.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildGetOnlyGbk(
-            Pipeline p,
-            TupleTag<String> tag1,
-            TupleTag<String> tag2) {
-        List<KV<Integer, String>> list1 =
-                Arrays.asList(
-                        KV.of(1, "collection1-1"),
-                        KV.of(2, "collection1-2"));
-        List<KV<Integer, String>> list2 =
-                Arrays.asList(
-                        KV.of(2, "collection2-2"),
-                        KV.of(3, "collection2-3"));
-        PCollection<KV<Integer, String>> collection1 = createInput("CreateList1", p, list1);
-        PCollection<KV<Integer, String>> collection2 = createInput("CreateList2", p, list2);
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(tag1, collection1)
-                        .and(tag2, collection2)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    @Test
-    @Category(ValidatesRunner.class)
-    public void testCoGroupByKeyGetOnly() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        final TupleTag<String> tag1 = new TupleTag<>();
-        final TupleTag<String> tag2 = new TupleTag<>();
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                buildGetOnlyGbk(p, tag1, tag2);
-
-        PAssert.thatMap(coGbkResults).satisfies(
-                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
-                    @Override
-                    public Void apply(Map<Integer, CoGbkResult> results) {
-                        assertEquals("collection1-1", results.get(1).getOnly(tag1));
-                        assertEquals("collection1-2", results.get(2).getOnly(tag1));
-                        assertEquals("collection2-2", results.get(2).getOnly(tag2));
-                        assertEquals("collection2-3", results.get(3).getOnly(tag2));
-                        return null;
-                    }
-                });
-
-        p.run();
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
-     * results of the {@code CoGroupByKey} over three
-     * {@code PCollection<KV<Integer, String>>}, each of which correlates
-     * a customer id to purchases, addresses, or names, respectively.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbk(
-            Pipeline p,
-            TupleTag<String> purchasesTag,
-            TupleTag<String> addressesTag,
-            TupleTag<String> namesTag) {
-        List<KV<Integer, String>> idToPurchases =
-                Arrays.asList(
-                        KV.of(2, "Boat"),
-                        KV.of(1, "Shoes"),
-                        KV.of(3, "Car"),
-                        KV.of(1, "Book"),
-                        KV.of(10, "Pens"),
-                        KV.of(8, "House"),
-                        KV.of(4, "Suit"),
-                        KV.of(11, "House"),
-                        KV.of(14, "Shoes"),
-                        KV.of(2, "Suit"),
-                        KV.of(8, "Suit Case"),
-                        KV.of(3, "House"));
-
-        List<KV<Integer, String>> idToAddress =
-                Arrays.asList(
-                        KV.of(2, "53 S. 3rd"),
-                        KV.of(10, "383 Jackson Street"),
-                        KV.of(20, "3 W. Arizona"),
-                        KV.of(3, "29 School Rd"),
-                        KV.of(8, "6 Watling Rd"));
-
-        List<KV<Integer, String>> idToName =
-                Arrays.asList(
-                        KV.of(1, "John Smith"),
-                        KV.of(2, "Sally James"),
-                        KV.of(8, "Jeffery Spalding"),
-                        KV.of(20, "Joan Lichtfield"));
-
-        PCollection<KV<Integer, String>> purchasesTable =
-                createInput("CreateIdToPurchases", p, idToPurchases);
-
-        PCollection<KV<Integer, String>> addressTable =
-                createInput("CreateIdToAddress", p, idToAddress);
-
-        PCollection<KV<Integer, String>> nameTable =
-                createInput("CreateIdToName", p, idToName);
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(namesTag, nameTable)
-                        .and(addressesTag, addressTable)
-                        .and(purchasesTag, purchasesTable)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    /**
-     * Returns a {@code PCollection<KV<Integer, CoGbkResult>>} containing the
-     * results of the {@code CoGroupByKey} over 2 {@code PCollection<KV<Integer, String>>},
-     * each of which correlates a customer id to clicks, purchases, respectively.
-     */
-    private PCollection<KV<Integer, CoGbkResult>> buildPurchasesCoGbkWithWindowing(
-            Pipeline p,
-            TupleTag<String> clicksTag,
-            TupleTag<String> purchasesTag) {
-        List<KV<Integer, String>> idToClick =
-                Arrays.asList(
-                        KV.of(1, "Click t0"),
-                        KV.of(2, "Click t2"),
-                        KV.of(1, "Click t4"),
-                        KV.of(1, "Click t6"),
-                        KV.of(2, "Click t8"));
-
-        List<KV<Integer, String>> idToPurchases =
-                Arrays.asList(
-                        KV.of(1, "Boat t1"),
-                        KV.of(1, "Shoesi t2"),
-                        KV.of(1, "Pens t3"),
-                        KV.of(2, "House t4"),
-                        KV.of(2, "Suit t5"),
-                        KV.of(1, "Car t6"),
-                        KV.of(1, "Book t7"),
-                        KV.of(2, "House t8"),
-                        KV.of(2, "Shoes t9"),
-                        KV.of(2, "House t10"));
-
-        PCollection<KV<Integer, String>> clicksTable =
-                createInput("CreateClicks",
-                        p,
-                        idToClick,
-                        Arrays.asList(0L, 2L, 4L, 6L, 8L))
-                        .apply("WindowClicks", Window.<KV<Integer, String>>into(
-                                FixedWindows.of(new Duration(4)))
-                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
-
-        PCollection<KV<Integer, String>> purchasesTable =
-                createInput("CreatePurchases",
-                        p,
-                        idToPurchases,
-                        Arrays.asList(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L))
-                        .apply("WindowPurchases", Window.<KV<Integer, String>>into(
-                                FixedWindows.of(new Duration(4)))
-                                .withTimestampCombiner(TimestampCombiner.EARLIEST));
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                KeyedPCollectionTuple.of(clicksTag, clicksTable)
-                        .and(purchasesTag, purchasesTable)
-                        .apply(CoGroupByKey.<Integer>create());
-        return coGbkResults;
-    }
-
-    @Test
-    @Category(ValidatesRunner.class)
-    public void testCoGroupByKey() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        final TupleTag<String> namesTag = new TupleTag<>();
-        final TupleTag<String> addressesTag = new TupleTag<>();
-        final TupleTag<String> purchasesTag = new TupleTag<>();
-
-
-        PCollection<KV<Integer, CoGbkResult>> coGbkResults =
-                buildPurchasesCoGbk(p, purchasesTag, addressesTag, namesTag);
-
-        PAssert.thatMap(coGbkResults).satisfies(
-                new SerializableFunction<Map<Integer, CoGbkResult>, Void>() {
-                    @Override
-                    public Void apply(Map<Integer, CoGbkResult> results) {
-                        CoGbkResult result1 = results.get(1);
-                        assertEquals("John Smith", result1.getOnly(namesTag));
-                        assertThat(result1.getAll(purchasesTag), containsInAnyOrder("Shoes", "Book"));
-
-                        CoGbkResult result2 = results.get(2);
-                        assertEquals("Sally James", result2.getOnly(namesTag));
-                        assertEquals("53 S. 3rd", result2.getOnly(addressesTag));
-                        assertThat(result2.getAll(purchasesTag), containsInAnyOrder("Suit", "Boat"));
-
-                        CoGbkResult result3 = results.get(3);
-                        assertEquals("29 School Rd", result3.getOnly(addressesTag), "29 School Rd");
-                        assertThat(result3.getAll(purchasesTag), containsInAnyOrder("Car", "House"));
-
-                        CoGbkResult result8 = results.get(8);
-                        assertEquals("Jeffery Spalding", result8.getOnly(namesTag));
-                        assertEquals("6 Watling Rd", result8.getOnly(addressesTag));
-                        assertThat(result8.getAll(purchasesTag), containsInAnyOrder("House", "Suit Case"));
-
-                        CoGbkResult result20 = results.get(20);
-                        assertEquals("Joan Lichtfield", result20.getOnly(namesTag));
-                        assertEquals("3 W. Arizona", result20.getOnly(addressesTag));
-
-                        assertEquals("383 Jackson Street", results.get(10).getOnly(addressesTag));
-
-                        assertThat(results.get(4).getAll(purchasesTag), containsInAnyOrder("Suit"));
-                        assertThat(results.get(10).getAll(purchasesTag), containsInAnyOrder("Pens"));
-                        assertThat(results.get(11).getAll(purchasesTag), containsInAnyOrder("House"));
-                        assertThat(results.get(14).getAll(purchasesTag), containsInAnyOrder("Shoes"));
-
-                        return null;
-                    }
-                });
-
-        p.run();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
deleted file mode 100644
index 9a8b43a..0000000
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/GroupByKeyTest.java
+++ /dev/null
@@ -1,155 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.TestJStormRunner;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.hamcrest.Description;
-import org.hamcrest.Matcher;
-import org.hamcrest.TypeSafeMatcher;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.util.Arrays;
-import java.util.List;
-
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-/**
- * Tests for {@link GroupByKey} with {@link StormRunner}.
- */
-@RunWith(JUnit4.class)
-public class GroupByKeyTest {
-
-    static final String[] WORDS_ARRAY = new String[] {
-            "hi", "there", "hi", "hi", "sue", "bob",
-            "hi", "sue", "", "", "ZOW", "bob", "" };
-
-    static final List<String> WORDS = Arrays.asList(WORDS_ARRAY);
-
-    @Test
-    public void testGroupByKey() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-
-        List<KV<String, Integer>> ungroupedPairs = Arrays.asList(
-                KV.of("k1", 3),
-                KV.of("k5", Integer.MAX_VALUE),
-                KV.of("k5", Integer.MIN_VALUE),
-                KV.of("k2", 66),
-                KV.of("k1", 4),
-                KV.of("k2", -33),
-                KV.of("k3", 0));
-
-        PCollection<KV<String, Integer>> input =
-                p.apply(Create.of(ungroupedPairs)
-                        .withCoder(KvCoder.of(StringUtf8Coder.of(), BigEndianIntegerCoder.of())));
-
-        PCollection<KV<String, Iterable<Integer>>> output =
-                input.apply(GroupByKey.<String, Integer>create());
-
-        PAssert.that(output)
-                .satisfies(new AssertThatHasExpectedContentsForTestGroupByKey());
-
-        p.run();
-    }
-
-    @Test
-    public void testCountGloballyBasic() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        options.setLocalMode(true);
-
-        Pipeline p = Pipeline.create(options);
-        PCollection<String> input = p.apply(Create.of(WORDS));
-
-        PCollection<Long> output =
-                input.apply(Count.<String>globally());
-
-        PAssert.that(output)
-                .containsInAnyOrder(13L);
-        p.run();
-    }
-
-    static class AssertThatHasExpectedContentsForTestGroupByKey
-            implements SerializableFunction<Iterable<KV<String, Iterable<Integer>>>,
-            Void> {
-        @Override
-        public Void apply(Iterable<KV<String, Iterable<Integer>>> actual) {
-            assertThat(actual, containsInAnyOrder(
-                    KvMatcher.isKv(is("k1"), containsInAnyOrder(3, 4)),
-                    KvMatcher.isKv(is("k5"), containsInAnyOrder(Integer.MAX_VALUE,
-                            Integer.MIN_VALUE)),
-                    KvMatcher.isKv(is("k2"), containsInAnyOrder(66, -33)),
-                    KvMatcher.isKv(is("k3"), containsInAnyOrder(0))));
-            return null;
-        }
-    }
-
-    /**
-     * Matcher for KVs.
-     */
-    public static class KvMatcher<K, V>
-            extends TypeSafeMatcher<KV<? extends K, ? extends V>> {
-        final Matcher<? super K> keyMatcher;
-        final Matcher<? super V> valueMatcher;
-
-        public static <K, V> KvMatcher<K, V> isKv(Matcher<K> keyMatcher,
-                                                  Matcher<V> valueMatcher) {
-            return new KvMatcher<>(keyMatcher, valueMatcher);
-        }
-
-        public KvMatcher(Matcher<? super K> keyMatcher,
-                         Matcher<? super V> valueMatcher) {
-            this.keyMatcher = keyMatcher;
-            this.valueMatcher = valueMatcher;
-        }
-
-        @Override
-        public boolean matchesSafely(KV<? extends K, ? extends V> kv) {
-            return keyMatcher.matches(kv.getKey())
-                    && valueMatcher.matches(kv.getValue());
-        }
-
-        @Override
-        public void describeTo(Description description) {
-            description
-                    .appendText("a KV(").appendValue(keyMatcher)
-                    .appendText(", ").appendValue(valueMatcher)
-                    .appendText(")");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/4ff42cbc/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
----------------------------------------------------------------------
diff --git a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java b/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
deleted file mode 100644
index c911364..0000000
--- a/runners/jstorm/src/test/java/org/apache/beam/runners/jstorm/translation/translator/ParDoTest.java
+++ /dev/null
@@ -1,624 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.jstorm.translation.translator;
-
-import org.apache.beam.runners.jstorm.TestJStormRunner;
-import com.google.common.base.MoreObjects;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.*;
-import org.apache.beam.sdk.io.GenerateSequence;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.state.*;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.UsesMapState;
-import org.apache.beam.sdk.testing.UsesStatefulParDo;
-import org.apache.beam.sdk.testing.ValidatesRunner;
-import org.apache.beam.sdk.transforms.*;
-import org.apache.beam.sdk.transforms.windowing.*;
-import org.apache.beam.sdk.values.*;
-import org.joda.time.Duration;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.util.*;
-
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
-import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-
-/**
- * Tests for {@link ParDo} with {@link StormRunner}.
- */
-@RunWith(JUnit4.class)
-public class ParDoTest implements Serializable {
-
-    @Test
-    public void testParDo() throws IOException {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        PCollection<String> output = pipeline
-                .apply(Create.of(inputs))
-                .apply(ParDo.of(new TestDoFn()));
-
-        PAssert.that(output)
-                .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testParDoWithSideInputs() throws IOException {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        PCollectionView<Integer> sideInput1 = pipeline
-                .apply("CreateSideInput1", Create.of(11))
-                .apply("ViewSideInput1", View.<Integer>asSingleton());
-        PCollectionView<Integer> sideInputUnread = pipeline
-                .apply("CreateSideInputUnread", Create.of(-3333))
-                .apply("ViewSideInputUnread", View.<Integer>asSingleton());
-
-        PCollectionView<Integer> sideInput2 = pipeline
-                .apply("CreateSideInput2", Create.of(222))
-                .apply("ViewSideInput2", View.<Integer>asSingleton());
-        PCollection<String> output = pipeline
-                .apply(Create.of(inputs))
-                .apply(ParDo.of(new TestDoFn(
-                                Arrays.asList(sideInput1, sideInput2),
-                                Arrays.<TupleTag<String>>asList()))
-                        .withSideInputs(sideInput1, sideInputUnread, sideInput2));
-
-        PAssert.that(output)
-                .satisfies(ParDoTest.HasExpectedOutput
-                        .forInput(inputs)
-                        .andSideInputs(11, 222));
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testParDoWithTaggedOutput() {
-        List<Integer> inputs = Arrays.asList(3, -42, 666);
-
-        TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-        TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
-        TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
-        TupleTag<String> additionalOutputTag3 = new TupleTag<String>("additional3"){};
-        TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
-
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        PCollectionTuple outputs = pipeline
-            .apply(Create.of(inputs))
-            .apply(ParDo
-                .of(new TestDoFn(
-                    Arrays.<PCollectionView<Integer>>asList(),
-                    Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
-                .withOutputTags(
-                    mainOutputTag,
-                    TupleTagList.of(additionalOutputTag3)
-                        .and(additionalOutputTag1)
-                        .and(additionalOutputTagUnwritten)
-                        .and(additionalOutputTag2)));
-
-        PAssert.that(outputs.get(mainOutputTag))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
-
-        PAssert.that(outputs.get(additionalOutputTag1))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag1));
-        PAssert.that(outputs.get(additionalOutputTag2))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag2));
-        PAssert.that(outputs.get(additionalOutputTag3))
-            .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                .fromOutput(additionalOutputTag3));
-        PAssert.that(outputs.get(additionalOutputTagUnwritten)).empty();
-
-        pipeline.run();
-    }
-
-    @Test
-    public void testNoWindowFnDoesNotReassignWindows() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final PCollection<Long> initialWindows =
-                pipeline
-                    .apply(GenerateSequence.from(0).to(10))
-                    .apply("AssignWindows", Window.into(new WindowOddEvenBuckets()));
-
-        // Sanity check the window assignment to demonstrate the baseline
-        PAssert.that(initialWindows)
-                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
-                .containsInAnyOrder(0L, 2L, 4L, 6L, 8L);
-        PAssert.that(initialWindows)
-                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
-                .containsInAnyOrder(1L, 3L, 5L, 7L, 9L);
-
-        PCollection<Boolean> upOne =
-                initialWindows.apply(
-                        "ModifyTypes",
-                        MapElements.<Long, Boolean>via(
-                                new SimpleFunction<Long, Boolean>() {
-                                    @Override
-                                    public Boolean apply(Long input) {
-                                        return input % 2 == 0;
-                                    }
-                                }));
-        PAssert.that(upOne)
-                .inWindow(WindowOddEvenBuckets.EVEN_WINDOW)
-                .containsInAnyOrder(true, true, true, true, true);
-        PAssert.that(upOne)
-                .inWindow(WindowOddEvenBuckets.ODD_WINDOW)
-                .containsInAnyOrder(false, false, false, false, false);
-
-        // The elements should be in the same windows, even though they would not be assigned to the
-        // same windows with the updated timestamps. If we try to apply the original WindowFn, the type
-        // will not be appropriate and the runner should crash, as a Boolean cannot be converted into
-        // a long.
-        PCollection<Boolean> updatedTrigger =
-                upOne.apply(
-                        "UpdateWindowingStrategy",
-                        Window.<Boolean>configure().triggering(Never.ever())
-                                .withAllowedLateness(Duration.ZERO)
-                                .accumulatingFiredPanes());
-        pipeline.run();
-    }
-
-    @Test
-    public void testValueStateSameId() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-
-        DoFn<KV<String, Integer>, KV<String, Integer>> fn =
-                new DoFn<KV<String, Integer>, KV<String, Integer>>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 0);
-                        c.output(KV.of("sizzle", currentValue));
-                        state.write(currentValue + 1);
-                    }
-                };
-
-        DoFn<KV<String, Integer>, Integer> fn2 =
-                new DoFn<KV<String, Integer>, Integer>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 13);
-                        c.output(currentValue);
-                        state.write(currentValue + 13);
-                    }
-                };
-
-        PCollection<KV<String, Integer>> intermediate =
-                pipeline.apply(Create.of(KV.of("hello", 42), KV.of("hello", 97), KV.of("hello", 84)))
-                        .apply("First stateful ParDo", ParDo.of(fn));
-
-        PCollection<Integer> output =
-                intermediate.apply("Second stateful ParDo", ParDo.of(fn2));
-
-        PAssert.that(intermediate)
-                .containsInAnyOrder(KV.of("sizzle", 0), KV.of("sizzle", 1), KV.of("sizzle", 2));
-        PAssert.that(output).containsInAnyOrder(13, 26, 39);
-        pipeline.run();
-    }
-
-    @Test
-    @Category({ValidatesRunner.class, UsesStatefulParDo.class})
-    public void testValueStateTaggedOutput() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-
-        final TupleTag<Integer> evenTag = new TupleTag<Integer>() {};
-        final TupleTag<Integer> oddTag = new TupleTag<Integer>() {};
-
-        DoFn<KV<String, Integer>, Integer> fn =
-                new DoFn<KV<String, Integer>, Integer>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<ValueState<Integer>> intState =
-                            StateSpecs.value(VarIntCoder.of());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) ValueState<Integer> state) {
-                        Integer currentValue = MoreObjects.firstNonNull(state.read(), 0);
-                        if (currentValue % 2 == 0) {
-                            c.output(currentValue);
-                        } else {
-                            c.output(oddTag, currentValue);
-                        }
-                        state.write(currentValue + 1);
-                    }
-                };
-
-        PCollectionTuple output =
-                pipeline.apply(
-                        Create.of(
-                                KV.of("hello", 42),
-                                KV.of("hello", 97),
-                                KV.of("hello", 84),
-                                KV.of("goodbye", 33),
-                                KV.of("hello", 859),
-                                KV.of("goodbye", 83945)))
-                        .apply(ParDo.of(fn).withOutputTags(evenTag, TupleTagList.of(oddTag)));
-
-        PCollection<Integer> evens = output.get(evenTag);
-        PCollection<Integer> odds = output.get(oddTag);
-
-        // There are 0 and 2 from "hello" and just 0 from "goodbye"
-        PAssert.that(evens).containsInAnyOrder(0, 2, 0);
-
-        // There are 1 and 3 from "hello" and just "1" from "goodbye"
-        PAssert.that(odds).containsInAnyOrder(1, 3, 1);
-        pipeline.run();
-    }
-
-    @Test
-    @Category({ValidatesRunner.class, UsesStatefulParDo.class, UsesMapState.class})
-    public void testMapStateCoderInference() {
-        StormPipelineOptions options = PipelineOptionsFactory.as(StormPipelineOptions.class);
-        options.setRunner(TestJStormRunner.class);
-        Pipeline pipeline = Pipeline.create(options);
-
-        final String stateId = "foo";
-        final String countStateId = "count";
-        Coder<MyInteger> myIntegerCoder = MyIntegerCoder.of();
-        pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);
-
-        DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>> fn =
-                new DoFn<KV<String, KV<String, Integer>>, KV<String, MyInteger>>() {
-
-                    @StateId(stateId)
-                    private final StateSpec<MapState<String, MyInteger>> mapState = StateSpecs.map();
-
-                    @StateId(countStateId)
-                    private final StateSpec<CombiningState<Integer, int[], Integer>>
-                            countState = StateSpecs.combiningFromInputInternal(VarIntCoder.of(),
-                            Sum.ofIntegers());
-
-                    @ProcessElement
-                    public void processElement(
-                            ProcessContext c, @StateId(stateId) MapState<String, MyInteger> state,
-                            @StateId(countStateId) CombiningState<Integer, int[], Integer>
-                                    count) {
-                        KV<String, Integer> value = c.element().getValue();
-                        state.put(value.getKey(), new MyInteger(value.getValue()));
-                        count.add(1);
-                        if (count.read() >= 4) {
-                            Iterable<Map.Entry<String, MyInteger>> iterate = state.entries().read();
-                            for (Map.Entry<String, MyInteger> entry : iterate) {
-                                c.output(KV.of(entry.getKey(), entry.getValue()));
-                            }
-                        }
-                    }
-                };
-
-        PCollection<KV<String, MyInteger>> output =
-                pipeline.apply(
-                        Create.of(
-                                KV.of("hello", KV.of("a", 97)), KV.of("hello", KV.of("b", 42)),
-                                KV.of("hello", KV.of("b", 42)), KV.of("hello", KV.of("c", 12))))
-                        .apply(ParDo.of(fn)).setCoder(KvCoder.of(StringUtf8Coder.of(), myIntegerCoder));
-
-        PAssert.that(output).containsInAnyOrder(KV.of("a", new MyInteger(97)),
-                KV.of("b", new MyInteger(42)), KV.of("c", new MyInteger(12)));
-        pipeline.run();
-    }
-
-
-    private static class WindowOddEvenBuckets extends NonMergingWindowFn<Long, IntervalWindow> {
-        private static final IntervalWindow EVEN_WINDOW =
-                new IntervalWindow(
-                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp());
-        private static final IntervalWindow ODD_WINDOW =
-                new IntervalWindow(
-                        BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE.maxTimestamp().minus(1));
-
-        @Override
-        public Collection<IntervalWindow> assignWindows(AssignContext c) throws Exception {
-            if (c.element() % 2 == 0) {
-                return Collections.singleton(EVEN_WINDOW);
-            }
-            return Collections.singleton(ODD_WINDOW);
-        }
-
-        @Override
-        public boolean isCompatible(WindowFn<?, ?> other) {
-            return other instanceof WindowOddEvenBuckets;
-        }
-
-        @Override
-        public Coder<IntervalWindow> windowCoder() {
-            return new IntervalWindow.IntervalWindowCoder();
-        }
-
-        @Override
-        public WindowMappingFn<IntervalWindow> getDefaultWindowMappingFn() {
-            throw new UnsupportedOperationException(
-                    String.format("Can't use %s for side inputs", getClass().getSimpleName()));
-        }
-    }
-
-
-    static class TestDoFn extends DoFn<Integer, String> {
-        enum State {NOT_SET_UP, UNSTARTED, STARTED, PROCESSING, FINISHED}
-
-        State state = State.NOT_SET_UP;
-
-        final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
-        final List<TupleTag<String>> additionalOutputTupleTags = new ArrayList<>();
-
-        public TestDoFn() {
-        }
-
-        public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
-                        List<TupleTag<String>> additionalOutputTupleTags) {
-            this.sideInputViews.addAll(sideInputViews);
-            this.additionalOutputTupleTags.addAll(additionalOutputTupleTags);
-        }
-
-        @Setup
-        public void prepare() {
-            assertEquals(State.NOT_SET_UP, state);
-            state = State.UNSTARTED;
-        }
-
-        @StartBundle
-        public void startBundle() {
-            assertThat(state,
-                anyOf(equalTo(State.UNSTARTED), equalTo(State.FINISHED)));
-
-            state = State.STARTED;
-        }
-
-        @ProcessElement
-        public void processElement(ProcessContext c) {
-            System.out.println("Recv elem: " + c.element());
-            assertThat(state,
-                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
-            state = State.PROCESSING;
-            outputToAllWithSideInputs(c, "processing: " + c.element());
-        }
-
-        @FinishBundle
-        public void finishBundle(FinishBundleContext c) {
-            assertThat(state,
-                anyOf(equalTo(State.STARTED), equalTo(State.PROCESSING)));
-            state = State.FINISHED;
-            c.output("finished", BoundedWindow.TIMESTAMP_MIN_VALUE, GlobalWindow.INSTANCE);
-            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
-                c.output(
-                    additionalOutputTupleTag,
-                    additionalOutputTupleTag.getId() + ": " + "finished",
-                    BoundedWindow.TIMESTAMP_MIN_VALUE,
-                    GlobalWindow.INSTANCE);
-            }
-        }
-
-        private void outputToAllWithSideInputs(ProcessContext c, String value) {
-            if (!sideInputViews.isEmpty()) {
-                List<Integer> sideInputValues = new ArrayList<>();
-                for (PCollectionView<Integer> sideInputView : sideInputViews) {
-                    sideInputValues.add(c.sideInput(sideInputView));
-                }
-                value += ": " + sideInputValues;
-            }
-            c.output(value);
-            for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
-                c.output(additionalOutputTupleTag,
-                    additionalOutputTupleTag.getId() + ": " + value);
-            }
-        }
-    }
-
-    private static class MyInteger implements Comparable<MyInteger> {
-        private final int value;
-
-        MyInteger(int value) {
-            this.value = value;
-        }
-
-        public int getValue() {
-            return value;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-
-            if (!(o instanceof MyInteger)) {
-                return false;
-            }
-
-            MyInteger myInteger = (MyInteger) o;
-
-            return value == myInteger.value;
-
-        }
-
-        @Override
-        public int hashCode() {
-            return value;
-        }
-
-        @Override
-        public int compareTo(MyInteger o) {
-            return Integer.compare(this.getValue(), o.getValue());
-        }
-
-        @Override
-        public String toString() {
-            return "MyInteger{" + "value=" + value + '}';
-        }
-    }
-
-    private static class MyIntegerCoder extends AtomicCoder<MyInteger> {
-        private static final MyIntegerCoder INSTANCE = new MyIntegerCoder();
-
-        private final VarIntCoder delegate = VarIntCoder.of();
-
-        public static MyIntegerCoder of() {
-            return INSTANCE;
-        }
-
-        @Override
-        public void encode(MyInteger value, OutputStream outStream)
-                throws CoderException, IOException {
-            delegate.encode(value.getValue(), outStream);
-        }
-
-        @Override
-        public MyInteger decode(InputStream inStream) throws CoderException,
-                IOException {
-            return new MyInteger(delegate.decode(inStream));
-        }
-    }
-
-    /** PAssert "matcher" for expected output. */
-    static class HasExpectedOutput
-        implements SerializableFunction<Iterable<String>, Void>, Serializable {
-        private final List<Integer> inputs;
-        private final List<Integer> sideInputs;
-        private final String additionalOutput;
-        private final boolean ordered;
-
-        public static HasExpectedOutput forInput(List<Integer> inputs) {
-            return new HasExpectedOutput(
-                new ArrayList<Integer>(inputs),
-                new ArrayList<Integer>(),
-                "",
-                false);
-        }
-
-        private HasExpectedOutput(List<Integer> inputs,
-                                  List<Integer> sideInputs,
-                                  String additionalOutput,
-                                  boolean ordered) {
-            this.inputs = inputs;
-            this.sideInputs = sideInputs;
-            this.additionalOutput = additionalOutput;
-            this.ordered = ordered;
-        }
-
-        public HasExpectedOutput andSideInputs(Integer... sideInputValues) {
-            List<Integer> sideInputs = new ArrayList<>();
-            for (Integer sideInputValue : sideInputValues) {
-                sideInputs.add(sideInputValue);
-            }
-            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, ordered);
-        }
-
-        public HasExpectedOutput fromOutput(TupleTag<String> outputTag) {
-            return fromOutput(outputTag.getId());
-        }
-        public HasExpectedOutput fromOutput(String outputId) {
-            return new HasExpectedOutput(inputs, sideInputs, outputId, ordered);
-        }
-
-        public HasExpectedOutput inOrder() {
-            return new HasExpectedOutput(inputs, sideInputs, additionalOutput, true);
-        }
-
-        @Override
-        public Void apply(Iterable<String> outputs) {
-            List<String> processeds = new ArrayList<>();
-            List<String> finisheds = new ArrayList<>();
-            for (String output : outputs) {
-                if (output.contains("finished")) {
-                    finisheds.add(output);
-                } else {
-                    processeds.add(output);
-                }
-            }
-
-            String sideInputsSuffix;
-            if (sideInputs.isEmpty()) {
-                sideInputsSuffix = "";
-            } else {
-                sideInputsSuffix = ": " + sideInputs;
-            }
-
-            String additionalOutputPrefix;
-            if (additionalOutput.isEmpty()) {
-                additionalOutputPrefix = "";
-            } else {
-                additionalOutputPrefix = additionalOutput + ": ";
-            }
-
-            List<String> expectedProcesseds = new ArrayList<>();
-            for (Integer input : inputs) {
-                expectedProcesseds.add(
-                    additionalOutputPrefix + "processing: " + input + sideInputsSuffix);
-            }
-            String[] expectedProcessedsArray =
-                expectedProcesseds.toArray(new String[expectedProcesseds.size()]);
-            if (!ordered || expectedProcesseds.isEmpty()) {
-                assertThat(processeds, containsInAnyOrder(expectedProcessedsArray));
-            } else {
-                assertThat(processeds, contains(expectedProcessedsArray));
-            }
-
-            for (String finished : finisheds) {
-                assertEquals(additionalOutputPrefix + "finished", finished);
-            }
-
-            return null;
-        }
-    }
-}