You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by ke...@apache.org on 2016/08/06 02:52:19 UTC

[01/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Repository: incubator-beam
Updated Branches:
  refs/heads/python-sdk 65152cab8 -> d72ffb080


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
index cafe873..517f968 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
@@ -24,6 +24,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasName
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.everyItem;
@@ -40,7 +41,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
@@ -54,7 +55,6 @@ import com.google.common.testing.EqualsTester;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.hamcrest.CustomTypeSafeMatcher;
 import org.hamcrest.FeatureMatcher;
 import org.hamcrest.Matcher;
@@ -1053,7 +1053,7 @@ public class DisplayDataTest implements Serializable {
   private static class IdentityTransform<T> extends PTransform<PCollection<T>, PCollection<T>> {
     @Override
     public PCollection<T> apply(PCollection<T> input) {
-      return input.apply(ParDo.of(new DoFn<T, T>() {
+      return input.apply(ParDo.of(new OldDoFn<T, T>() {
         @Override
         public void processElement(ProcessContext c) throws Exception {
           c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
index 10a2a7e..97667a3 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
@@ -29,9 +29,9 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.DoFnTester;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -85,8 +85,8 @@ public class CoGroupByKeyTest implements Serializable {
           .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
     }
     return input
-            .apply("Identity" + name, ParDo.of(new DoFn<KV<Integer, String>,
-                                     KV<Integer, String>>() {
+            .apply("Identity" + name, ParDo.of(new OldDoFn<KV<Integer, String>,
+                                                 KV<Integer, String>>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.output(c.element());
@@ -313,11 +313,11 @@ public class CoGroupByKeyTest implements Serializable {
   }
 
   /**
-   * A DoFn used in testCoGroupByKeyWithWindowing(), to test processing the
+   * An OldDoFn used in testCoGroupByKeyWithWindowing(), to test processing the
    * results of a CoGroupByKey.
    */
   private static class ClickOfPurchaseFn extends
-      DoFn<KV<Integer, CoGbkResult>, KV<String, String>> implements RequiresWindowAccess {
+      OldDoFn<KV<Integer, CoGbkResult>, KV<String, String>> implements RequiresWindowAccess {
     private final TupleTag<String> clicksTag;
 
     private final TupleTag<String> purchasesTag;
@@ -347,11 +347,11 @@ public class CoGroupByKeyTest implements Serializable {
 
 
   /**
-   * A DoFn used in testCoGroupByKeyHandleResults(), to test processing the
+   * An OldDoFn used in testCoGroupByKeyHandleResults(), to test processing the
    * results of a CoGroupByKey.
    */
   private static class CorrelatePurchaseCountForAddressesWithoutNamesFn extends
-      DoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
+      OldDoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
     private final TupleTag<String> purchasesTag;
 
     private final TupleTag<String> addressesTag;
@@ -401,7 +401,7 @@ public class CoGroupByKeyTest implements Serializable {
   }
 
   /**
-   * Tests that the consuming DoFn
+   * Tests that the consuming OldDoFn
    * (CorrelatePurchaseCountForAddressesWithoutNamesFn) performs as expected.
    */
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
index fb2b4d5..ed64f84 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertThat;
 import org.apache.beam.sdk.util.TriggerTester;
 import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester;
 import org.apache.beam.sdk.values.TimestampedValue;
+
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.junit.Before;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
index 76bc038..27d2539 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.util.WindowingStrategy;
@@ -199,7 +199,7 @@ public class WindowTest implements Serializable {
         .apply(GroupByKey.<Integer, String>create())
         .apply(
             ParDo.of(
-                new DoFn<KV<Integer, Iterable<String>>, Void>() {
+                new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
                   @Override
                   public void processElement(ProcessContext c) throws Exception {
                     assertThat(
@@ -231,7 +231,7 @@ public class WindowTest implements Serializable {
         .apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10)))
             .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow()))
         .apply(GroupByKey.<Integer, String>create())
-        .apply(ParDo.of(new DoFn<KV<Integer, Iterable<String>>, Void>() {
+        .apply(ParDo.of(new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
           @Override
           public void processElement(ProcessContext c) throws Exception {
             assertThat(c.timestamp(), equalTo(new Instant(10 * 60 * 1000 - 1)));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
index c1e092a..622a277 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
@@ -26,9 +26,9 @@ import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -59,7 +59,7 @@ public class WindowingTest implements Serializable {
   private static class WindowedCount extends PTransform<PCollection<String>, PCollection<String>> {
 
     private final class FormatCountsDoFn
-        extends DoFn<KV<String, Long>, String> implements RequiresWindowAccess {
+        extends OldDoFn<KV<String, Long>, String> implements RequiresWindowAccess {
       @Override
           public void processElement(ProcessContext c) {
         c.output(c.element().getKey() + ":" + c.element().getValue()
@@ -234,8 +234,8 @@ public class WindowingTest implements Serializable {
     p.run();
   }
 
-  /** A DoFn that tokenizes lines of text into individual words. */
-  static class ExtractWordsWithTimestampsFn extends DoFn<String, String> {
+  /** An OldDoFn that tokenizes lines of text into individual words. */
+  static class ExtractWordsWithTimestampsFn extends OldDoFn<String, String> {
     @Override
     public void processElement(ProcessContext c) {
       String[] words = c.element().split("[^a-zA-Z0-9']+");

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
index c808b4d..ee5a2b3 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
@@ -18,10 +18,12 @@
 
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.Combine;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+
+import org.apache.beam.sdk.transforms.Combine;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
index 2cbc20e..b95f235 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
@@ -18,10 +18,12 @@
 
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.Combine;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+
+import org.apache.beam.sdk.transforms.Combine;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
index d9e7593..30406fc 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
@@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableList;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
index 6c5d0bd..f6bacc4 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
@@ -25,7 +25,6 @@ import static org.apache.beam.sdk.util.Structs.addString;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
 import org.hamcrest.Matchers;
 import org.junit.Assert;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
index 7e68df9..e87bbee 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
@@ -59,12 +59,12 @@ public class StringUtilsTest {
   /**
    * Inner class for simple name test.
    */
-  private class EmbeddedDoFn {
+  private class EmbeddedOldDoFn {
 
-    private class DeeperEmbeddedDoFn extends EmbeddedDoFn {}
+    private class DeeperEmbeddedOldDoFn extends EmbeddedOldDoFn {}
 
-    private EmbeddedDoFn getEmbedded() {
-      return new DeeperEmbeddedDoFn();
+    private EmbeddedOldDoFn getEmbedded() {
+      return new DeeperEmbeddedOldDoFn();
     }
   }
 
@@ -93,22 +93,22 @@ public class StringUtilsTest {
   @Test
   public void testSimpleName() {
     assertEquals("Embedded",
-        StringUtils.approximateSimpleName(EmbeddedDoFn.class));
+        StringUtils.approximateSimpleName(EmbeddedOldDoFn.class));
   }
 
   @Test
   public void testAnonSimpleName() throws Exception {
     thrown.expect(IllegalArgumentException.class);
 
-    EmbeddedDoFn anon = new EmbeddedDoFn(){};
+    EmbeddedOldDoFn anon = new EmbeddedOldDoFn(){};
 
     StringUtils.approximateSimpleName(anon.getClass());
   }
 
   @Test
   public void testNestedSimpleName() {
-    EmbeddedDoFn fn = new EmbeddedDoFn();
-    EmbeddedDoFn inner = fn.getEmbedded();
+    EmbeddedOldDoFn fn = new EmbeddedOldDoFn();
+    EmbeddedOldDoFn inner = fn.getEmbedded();
 
     assertEquals("DeeperEmbedded", StringUtils.approximateSimpleName(inner.getClass()));
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
index b321c8f..4892bbd 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
@@ -20,6 +20,7 @@ package org.apache.beam.sdk.util;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+
 import static org.junit.Assert.assertTrue;
 
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
index fb002de..79f0cb7 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
@@ -31,6 +31,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.beam.sdk.util.common.Counter.CommitState;
 import org.apache.beam.sdk.util.common.Counter.CounterMean;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
index 9a8ab30..547c778 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollection.IsBounded;
@@ -75,7 +75,7 @@ public final class PCollectionTupleTest implements Serializable {
         .apply(Create.of(inputs));
 
     PCollectionTuple outputs = mainInput.apply(ParDo
-        .of(new DoFn<Integer, Integer>() {
+        .of(new OldDoFn<Integer, Integer>() {
           @Override
           public void processElement(ProcessContext c) {
             c.sideOutput(sideOutputTag, c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
index ba5dffb..c525cf1 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import org.junit.Rule;
@@ -44,7 +44,7 @@ public class TypedPValueTest {
   @Rule
   public ExpectedException thrown = ExpectedException.none();
 
-  private static class IdentityDoFn extends DoFn<Integer, Integer> {
+  private static class IdentityDoFn extends OldDoFn<Integer, Integer> {
     private static final long serialVersionUID = 0;
     @Override
     public void processElement(ProcessContext c) throws Exception {
@@ -129,7 +129,7 @@ public class TypedPValueTest {
   static class EmptyClass {
   }
 
-  private static class EmptyClassDoFn extends DoFn<Integer, EmptyClass> {
+  private static class EmptyClassDoFn extends OldDoFn<Integer, EmptyClass> {
     private static final long serialVersionUID = 0;
     @Override
     public void processElement(ProcessContext c) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
index 72abaea..88836f9 100644
--- a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
+++ b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.extensions.joinlibrary;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -59,7 +59,7 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
         @Override
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();
@@ -108,7 +108,7 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
         @Override
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();
@@ -161,7 +161,7 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
         @Override
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 76f7079..9fccbf9 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -44,7 +44,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -324,7 +324,7 @@ public class BigQueryIO {
    * <p>Each {@link TableRow} contains values indexed by column name. Here is a
    * sample processing function that processes a "line" column from rows:
    * <pre>{@code
-   * static class ExtractWordsFn extends DoFn<TableRow, String> {
+   * static class ExtractWordsFn extends OldDoFn<TableRow, String> {
    *   public void processElement(ProcessContext c) {
    *     // Get the "line" field of the TableRow object, split it into words, and emit them.
    *     TableRow row = c.element();
@@ -696,7 +696,7 @@ public class BigQueryIO {
       input.getPipeline()
           .apply("Create(CleanupOperation)", Create.of(cleanupOperation))
           .apply("Cleanup", ParDo.of(
-              new DoFn<CleanupOperation, Void>() {
+              new OldDoFn<CleanupOperation, Void>() {
                 @Override
                 public void processElement(ProcessContext c)
                     throws Exception {
@@ -707,7 +707,7 @@ public class BigQueryIO {
       return outputs.get(mainOutput);
     }
 
-    private static class IdentityFn<T> extends DoFn<T, T> {
+    private static class IdentityFn<T> extends OldDoFn<T, T> {
       @Override
       public void processElement(ProcessContext c) {
         c.output(c.element());
@@ -1262,7 +1262,7 @@ public class BigQueryIO {
    * <p>Here is a sample transform that produces TableRow values containing
    * "word" and "count" columns:
    * <pre>{@code
-   * static class FormatCountsFn extends DoFn<KV<String, Long>, TableRow> {
+   * static class FormatCountsFn extends OldDoFn<KV<String, Long>, TableRow> {
    *   public void processElement(ProcessContext c) {
    *     TableRow row = new TableRow()
    *         .set("word", c.element().getKey())
@@ -2011,11 +2011,11 @@ public class BigQueryIO {
   /////////////////////////////////////////////////////////////////////////////
 
   /**
-   * Implementation of DoFn to perform streaming BigQuery write.
+   * Implementation of OldDoFn to perform streaming BigQuery write.
    */
   @SystemDoFnInternal
   private static class StreamingWriteFn
-      extends DoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
+      extends OldDoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
     /** TableSchema in JSON. Use String to make the class Serializable. */
     private final String jsonTableSchema;
 
@@ -2248,8 +2248,8 @@ public class BigQueryIO {
    * id is created by concatenating this randomUUID with a sequential number.
    */
   private static class TagWithUniqueIdsAndTable
-      extends DoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>>
-      implements DoFn.RequiresWindowAccess {
+      extends OldDoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>>
+      implements OldDoFn.RequiresWindowAccess {
     /** TableSpec to write to. */
     private final String tableSpec;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
index f4082d4..1f77e3e 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.io.range.ByteKeyRange;
 import org.apache.beam.sdk.io.range.ByteKeyRangeTracker;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -512,7 +512,7 @@ public class BigtableIO {
       return new BigtableServiceImpl(options);
     }
 
-    private class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, Void> {
+    private class BigtableWriterFn extends OldDoFn<KV<ByteString, Iterable<Mutation>>, Void> {
 
       public BigtableWriterFn(String tableId, BigtableService bigtableService) {
         this.tableId = checkNotNull(tableId, "tableId");

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
index bda907a..6f3663a 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
@@ -37,9 +37,9 @@ import org.apache.beam.sdk.io.Sink.Writer;
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Values;
@@ -85,7 +85,6 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NoSuchElementException;
-
 import javax.annotation.Nullable;
 
 /**
@@ -479,11 +478,11 @@ public class V1Beta3 {
     }
 
     /**
-     * A {@link DoFn} that splits a given query into multiple sub-queries, assigns them unique keys
-     * and outputs them as {@link KV}.
+     * An {@link OldDoFn} that splits a given query into multiple sub-queries, assigns them unique
+     * keys and outputs them as {@link KV}.
      */
     @VisibleForTesting
-    static class SplitQueryFn extends DoFn<Query, KV<Integer, Query>> {
+    static class SplitQueryFn extends OldDoFn<Query, KV<Integer, Query>> {
       private final V1Beta3Options options;
       // number of splits to make for a given query
       private final int numSplits;
@@ -560,10 +559,10 @@ public class V1Beta3 {
     }
 
     /**
-     * A {@link DoFn} that reads entities from Datastore for each query.
+     * A {@link OldDoFn} that reads entities from Datastore for each query.
      */
     @VisibleForTesting
-    static class ReadFn extends DoFn<Query, Entity> {
+    static class ReadFn extends OldDoFn<Query, Entity> {
       private final V1Beta3Options options;
       private final V1Beta3DatastoreFactory datastoreFactory;
       // Datastore client

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
index 00e7891..7d2df62 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
@@ -58,7 +58,7 @@ import org.apache.beam.sdk.testing.SourceTestUtils;
 import org.apache.beam.sdk.testing.SourceTestUtils.ExpectedSplitOutcome;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -216,7 +216,7 @@ public class BigQueryIOTest implements Serializable {
     private Object[] pollJobReturns;
     private String executingProject;
     // Both counts will be reset back to zeros after serialization.
-    // This is a work around for DoFn's verifyUnmodified check.
+    // This is a work around for OldDoFn's verifyUnmodified check.
     private transient int startJobCallsCount;
     private transient int pollJobStatusCallsCount;
 
@@ -546,7 +546,7 @@ public class BigQueryIOTest implements Serializable {
         .apply(BigQueryIO.Read.from("non-executing-project:somedataset.sometable")
             .withTestServices(fakeBqServices)
             .withoutValidation())
-        .apply(ParDo.of(new DoFn<TableRow, String>() {
+        .apply(ParDo.of(new OldDoFn<TableRow, String>() {
           @Override
           public void processElement(ProcessContext c) throws Exception {
             c.output((String) c.element().get("name"));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
index a39d7d5..83489a5 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.CountingInput;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
 
@@ -108,7 +108,7 @@ public class BigtableWriteIT implements Serializable {
 
     Pipeline p = Pipeline.create(options);
     p.apply(CountingInput.upTo(numRows))
-        .apply(ParDo.of(new DoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
+        .apply(ParDo.of(new OldDoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
           @Override
           public void processElement(ProcessContext c) {
             int index = c.element().intValue();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
index 59d91d4..daed1cb 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
@@ -27,7 +27,7 @@ import static com.google.datastore.v1beta3.client.DatastoreHelper.makeValue;
 
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff;
 import org.apache.beam.sdk.util.RetryHttpRequestInitializer;
 
@@ -109,9 +109,9 @@ class V1Beta3TestUtil {
   }
 
   /**
-   * A DoFn that creates entity for a long number.
+   * A OldDoFn that creates entity for a long number.
    */
-  static class CreateEntityFn extends DoFn<Long, Entity> {
+  static class CreateEntityFn extends OldDoFn<Long, Entity> {
     private final String kind;
     @Nullable
     private final String namespace;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index 2de933c..342c4fc 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
 import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -453,7 +453,7 @@ public class JmsIO {
       checkArgument((queue != null || topic != null), "Either queue or topic is required");
     }
 
-    private static class JmsWriter extends DoFn<String, Void> {
+    private static class JmsWriter extends OldDoFn<String, Void> {
 
       private ConnectionFactory connectionFactory;
       private String queue;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 3b64bd5..eb649a6 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -33,7 +33,7 @@ import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
 import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
 import org.apache.beam.sdk.io.kafka.KafkaCheckpointMark.PartitionMark;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -550,7 +550,7 @@ public class KafkaIO {
       return typedRead
           .apply(begin)
           .apply("Remove Kafka Metadata",
-              ParDo.of(new DoFn<KafkaRecord<K, V>, KV<K, V>>() {
+              ParDo.of(new OldDoFn<KafkaRecord<K, V>, KV<K, V>>() {
                 @Override
                 public void processElement(ProcessContext ctx) {
                   ctx.output(ctx.element().getKV());
@@ -1315,7 +1315,7 @@ public class KafkaIO {
     public PDone apply(PCollection<V> input) {
       return input
         .apply("Kafka values with default key",
-          ParDo.of(new DoFn<V, KV<Void, V>>() {
+          ParDo.of(new OldDoFn<V, KV<Void, V>>() {
             @Override
             public void processElement(ProcessContext ctx) throws Exception {
               ctx.output(KV.<Void, V>of(null, ctx.element()));
@@ -1326,7 +1326,7 @@ public class KafkaIO {
     }
   }
 
-  private static class KafkaWriter<K, V> extends DoFn<KV<K, V>, Void> {
+  private static class KafkaWriter<K, V> extends OldDoFn<KV<K, V>, Void> {
 
     @Override
     public void startBundle(Context c) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
index dd93823..d7b1921 100644
--- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
+++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
@@ -33,10 +33,10 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -78,7 +78,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
-
 import javax.annotation.Nullable;
 
 /**
@@ -281,7 +280,7 @@ public class KafkaIOTest {
     p.run();
   }
 
-  private static class ElementValueDiff extends DoFn<Long, Long> {
+  private static class ElementValueDiff extends OldDoFn<Long, Long> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());
@@ -309,7 +308,7 @@ public class KafkaIOTest {
     p.run();
   }
 
-  private static class RemoveKafkaMetadata<K, V> extends DoFn<KafkaRecord<K, V>, KV<K, V>> {
+  private static class RemoveKafkaMetadata<K, V> extends OldDoFn<KafkaRecord<K, V>, KV<K, V>> {
     @Override
     public void processElement(ProcessContext ctx) throws Exception {
       ctx.output(ctx.element().getKV());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
----------------------------------------------------------------------
diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
index fef8d40..1141e88 100644
--- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
+++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
@@ -47,9 +47,9 @@ public class WithTimestampsJava8Test implements Serializable {
          .apply(WithTimestamps.of((String input) -> new Instant(Long.valueOf(yearTwoThousand))));
 
     PCollection<KV<String, Instant>> timestampedVals =
-        timestamped.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+        timestamped.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
           @Override
-          public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+          public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
               throws Exception {
             c.output(KV.of(c.element(), c.timestamp()));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
index c0e5b17..bc55c06 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.values.KV;
@@ -108,7 +108,7 @@ import java.util.regex.Pattern;
  */
 public class DebuggingWordCount {
   /** A DoFn that filters for a specific key based upon a regular expression. */
-  public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
+  public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
     /**
      * Concept #1: The logger below uses the fully qualified class name of FilterTextFn
      * as the logger. All log statements emitted by this logger will be referenced by this name

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
index be32afa..55beb1f 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
 
@@ -82,7 +82,7 @@ public class MinimalWordCount {
      // DoFn (defined in-line) on each element that tokenizes the text line into individual words.
      // The ParDo returns a PCollection<String>, where each element is an individual word in
      // Shakespeare's collected texts.
-     .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+     .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
                        @Override
                        public void processElement(ProcessContext c) {
                          for (String word : c.element().split("[^a-zA-Z']+")) {
@@ -98,7 +98,7 @@ public class MinimalWordCount {
      .apply(Count.<String>perElement())
      // Apply another ParDo transform that formats our PCollection of word counts into a printable
      // string, suitable for writing to an output file.
-     .apply("FormatResults", ParDo.of(new DoFn<KV<String, Long>, String>() {
+     .apply("FormatResults", ParDo.of(new OldDoFn<KV<String, Long>, String>() {
                        @Override
                        public void processElement(ProcessContext c) {
                          c.output(c.element().getKey() + ": " + c.element().getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
index c2defa7..ffe8b88 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -121,7 +121,7 @@ public class WindowedWordCount {
    * his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
    * 2-hour period.
    */
-  static class AddTimestampFn extends DoFn<String, String> {
+  static class AddTimestampFn extends OldDoFn<String, String> {
     private static final long RAND_RANGE = 7200000; // 2 hours in ms
 
     @Override
@@ -137,7 +137,7 @@ public class WindowedWordCount {
   }
 
   /** A DoFn that converts a Word and Count into a BigQuery table row. */
-  static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+  static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
index 803e800..5432036 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -95,7 +95,7 @@ public class WordCount {
    * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
    * pipeline.
    */
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 
@@ -118,7 +118,7 @@ public class WordCount {
   }
 
   /** A DoFn that converts a Word and Count into a printable string. */
-  public static class FormatAsTextFn extends DoFn<KV<String, Long>, String> {
+  public static class FormatAsTextFn extends OldDoFn<KV<String, Long>, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().getKey() + ": " + c.element().getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
index 5c182b2..9b347da 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.IntraBundleParallelization;
 import org.apache.beam.sdk.util.Transport;
 
@@ -72,7 +72,7 @@ public class PubsubFileInjector {
   }
 
   /** A DoFn that publishes non-empty lines to Google Cloud PubSub. */
-  public static class Bound extends DoFn<String, Void> {
+  public static class Bound extends OldDoFn<String, Void> {
     private final String outputTopic;
     private final String timestampLabelKey;
     public transient Pubsub pubsub;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
index 9a75bb7..6a1c41b 100644
--- a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
+++ b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
@@ -20,7 +20,7 @@ package ${package};
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import org.slf4j.Logger;
@@ -50,13 +50,13 @@ public class StarterPipeline {
         PipelineOptionsFactory.fromArgs(args).withValidation().create());
 
     p.apply(Create.of("Hello", "World"))
-    .apply(ParDo.of(new DoFn<String, String>() {
+    .apply(ParDo.of(new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) {
         c.output(c.element().toUpperCase());
       }
     }))
-    .apply(ParDo.of(new DoFn<String, Void>() {
+    .apply(ParDo.of(new OldDoFn<String, Void>() {
       @Override
       public void processElement(ProcessContext c)  {
         LOG.info(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
index 8c71d9d..7c13350 100644
--- a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
+++ b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
@@ -20,7 +20,7 @@ package it.pkg;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import org.slf4j.Logger;
@@ -50,13 +50,13 @@ public class StarterPipeline {
         PipelineOptionsFactory.fromArgs(args).withValidation().create());
 
     p.apply(Create.of("Hello", "World"))
-    .apply(ParDo.of(new DoFn<String, String>() {
+    .apply(ParDo.of(new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) {
         c.output(c.element().toUpperCase());
       }
     }))
-    .apply(ParDo.of(new DoFn<String, Void>() {
+    .apply(ParDo.of(new OldDoFn<String, Void>() {
       @Override
       public void processElement(ProcessContext c)  {
         LOG.info(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
----------------------------------------------------------------------
diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
index f1dfbb9..0da75f4 100644
--- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
+++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
@@ -20,11 +20,11 @@ package org.apache.beam.sdk.microbenchmarks.transforms;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnReflector;
 import org.apache.beam.sdk.transforms.DoFnReflector.DoFnInvoker;
 import org.apache.beam.sdk.transforms.DoFnWithContext;
 import org.apache.beam.sdk.transforms.DoFnWithContext.ExtraContextFactory;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowingInternals;
@@ -40,7 +40,7 @@ import org.openjdk.jmh.annotations.State;
 import org.openjdk.jmh.annotations.Warmup;
 
 /**
- * Benchmarks for {@link DoFn} and {@link DoFnWithContext} invocations, specifically
+ * Benchmarks for {@link OldDoFn} and {@link DoFnWithContext} invocations, specifically
  * for measuring the overhead of {@link DoFnReflector}.
  */
 @State(Scope.Benchmark)
@@ -50,7 +50,7 @@ public class DoFnReflectorBenchmark {
 
   private static final String ELEMENT = "some string to use for testing";
 
-  private DoFn<String, String> doFn = new UpperCaseDoFn();
+  private OldDoFn<String, String> doFn = new UpperCaseDoFn();
   private DoFnWithContext<String, String> doFnWithContext = new UpperCaseDoFnWithContext();
 
   private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(doFn, ELEMENT);
@@ -71,7 +71,7 @@ public class DoFnReflectorBenchmark {
   };
 
   private DoFnReflector doFnReflector;
-  private DoFn<String, String> adaptedDoFnWithContext;
+  private OldDoFn<String, String> adaptedDoFnWithContext;
 
   private DoFnInvoker<String, String> invoker;
 
@@ -100,7 +100,7 @@ public class DoFnReflectorBenchmark {
     return stubDoFnWithContextContext.output;
   }
 
-  private static class UpperCaseDoFn extends DoFn<String, String> {
+  private static class UpperCaseDoFn extends OldDoFn<String, String> {
 
     @Override
     public void processElement(ProcessContext c) throws Exception {
@@ -116,12 +116,12 @@ public class DoFnReflectorBenchmark {
     }
   }
 
-  private static class StubDoFnProcessContext extends DoFn<String, String>.ProcessContext {
+  private static class StubDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
 
     private final String element;
     private String output;
 
-    public StubDoFnProcessContext(DoFn<String, String> fn, String element) {
+    public StubDoFnProcessContext(OldDoFn<String, String> fn, String element) {
       fn.super();
       this.element = element;
     }


[50/51] [abbrv] incubator-beam git commit: Merge remote-tracking branch 'origin/master' into python-sdk

Posted by ke...@apache.org.
Merge remote-tracking branch 'origin/master' into python-sdk


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/c3988118
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/c3988118
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/c3988118

Branch: refs/heads/python-sdk
Commit: c39881186d4625d935efd9a717d9978cd1252979
Parents: 65152ca c584b37
Author: Ahmet Altay <al...@google.com>
Authored: Fri Aug 5 15:41:30 2016 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Fri Aug 5 15:41:30 2016 -0700

----------------------------------------------------------------------
 .travis.yml                                     |    6 +-
 KEYS                                            |  141 -
 NOTICE                                          |    1 -
 README.md                                       |   32 +-
 examples/java/pom.xml                           |   73 +-
 .../beam/examples/DebuggingWordCount.java       |    4 +-
 .../apache/beam/examples/MinimalWordCount.java  |   30 +-
 .../apache/beam/examples/WindowedWordCount.java |   91 +-
 .../org/apache/beam/examples/WordCount.java     |    6 +-
 .../examples/common/DataflowExampleOptions.java |   37 -
 .../examples/common/DataflowExampleUtils.java   |  491 --
 .../common/ExampleBigQueryTableOptions.java     |   10 +-
 .../beam/examples/common/ExampleOptions.java    |   83 +
 ...xamplePubsubTopicAndSubscriptionOptions.java |   10 +-
 .../common/ExamplePubsubTopicOptions.java       |   12 +-
 .../beam/examples/common/ExampleUtils.java      |  390 ++
 .../examples/common/PubsubFileInjector.java     |   10 +-
 .../beam/examples/complete/AutoComplete.java    |   91 +-
 .../examples/complete/StreamingWordExtract.java |   56 +-
 .../apache/beam/examples/complete/TfIdf.java    |   28 +-
 .../examples/complete/TopWikipediaSessions.java |   38 +-
 .../examples/complete/TrafficMaxLaneFlow.java   |   90 +-
 .../beam/examples/complete/TrafficRoutes.java   |   90 +-
 .../examples/cookbook/BigQueryTornadoes.java    |    6 +-
 .../cookbook/CombinePerKeyExamples.java         |    6 +-
 .../examples/cookbook/DatastoreWordCount.java   |   67 +-
 .../beam/examples/cookbook/DeDupExample.java    |    5 +-
 .../beam/examples/cookbook/FilterExamples.java  |   17 +-
 .../beam/examples/cookbook/JoinExamples.java    |   14 +-
 .../examples/cookbook/MaxPerKeyExamples.java    |    6 +-
 .../beam/examples/cookbook/TriggerExample.java  |   97 +-
 .../org/apache/beam/examples/WordCountIT.java   |  118 +-
 .../org/apache/beam/examples/WordCountTest.java |    3 +-
 .../examples/complete/AutoCompleteTest.java     |    2 +-
 .../examples/cookbook/TriggerExampleTest.java   |    2 +-
 examples/java8/pom.xml                          |   81 +-
 .../beam/examples/MinimalWordCountJava8.java    |   30 +-
 .../beam/examples/complete/game/GameStats.java  |   57 +-
 .../examples/complete/game/HourlyTeamScore.java |    9 +-
 .../examples/complete/game/LeaderBoard.java     |   23 +-
 .../beam/examples/complete/game/UserScore.java  |    4 +-
 .../complete/game/injector/InjectorUtils.java   |    6 +-
 .../injector/RetryHttpInitializerWrapper.java   |    5 +-
 .../complete/game/utils/WriteToBigQuery.java    |   20 +-
 .../game/utils/WriteWindowedToBigQuery.java     |   16 +-
 .../complete/game/HourlyTeamScoreTest.java      |    2 +-
 .../examples/complete/game/UserScoreTest.java   |    2 +-
 examples/pom.xml                                |    4 +-
 pom.xml                                         |  265 +-
 runners/core-java/pom.xml                       |   55 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |   28 +-
 .../core/UnboundedReadFromBoundedSource.java    |  542 ++
 .../apache/beam/runners/core/package-info.java  |   22 +
 .../org/apache/beam/sdk/util/AssignWindows.java |   46 +
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |   80 +
 .../beam/sdk/util/BatchTimerInternals.java      |  141 +
 .../org/apache/beam/sdk/util/DoFnRunner.java    |   63 +
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  551 ++
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  222 +
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   63 +
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |  100 +
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |  269 +
 .../sdk/util/LateDataDroppingDoFnRunner.java    |  147 +
 .../org/apache/beam/sdk/util/NonEmptyPanes.java |  150 +
 .../apache/beam/sdk/util/PaneInfoTracker.java   |  158 +
 .../sdk/util/PushbackSideInputDoFnRunner.java   |  115 +
 .../java/org/apache/beam/sdk/util/ReduceFn.java |  130 +
 .../beam/sdk/util/ReduceFnContextFactory.java   |  497 ++
 .../apache/beam/sdk/util/ReduceFnRunner.java    |  988 ++++
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |   55 +
 .../apache/beam/sdk/util/SystemReduceFn.java    |  139 +
 .../org/apache/beam/sdk/util/TriggerRunner.java |  245 +
 .../org/apache/beam/sdk/util/WatermarkHold.java |  540 ++
 .../util/common/ElementByteSizeObservable.java  |   42 +
 .../beam/sdk/util/common/PeekingReiterator.java |   99 +
 .../beam/sdk/util/common/package-info.java      |   20 +
 .../org/apache/beam/sdk/util/package-info.java  |   20 +
 .../UnboundedReadFromBoundedSourceTest.java     |  374 ++
 .../beam/sdk/util/BatchTimerInternalsTest.java  |  118 +
 .../sdk/util/GroupAlsoByWindowsProperties.java  |  661 +++
 ...oupAlsoByWindowsViaOutputBufferDoFnTest.java |  110 +
 .../util/LateDataDroppingDoFnRunnerTest.java    |  117 +
 .../util/PushbackSideInputDoFnRunnerTest.java   |  234 +
 .../beam/sdk/util/ReduceFnRunnerTest.java       | 1447 ++++++
 .../apache/beam/sdk/util/ReduceFnTester.java    |  789 +++
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   86 +
 runners/direct-java/pom.xml                     |   22 +-
 .../runners/direct/AggregatorContainer.java     |  183 +
 .../direct/AvroIOShardedWriteFactory.java       |   76 -
 .../direct/BoundedReadEvaluatorFactory.java     |   26 +-
 .../beam/runners/direct/CloningThreadLocal.java |   43 +
 .../beam/runners/direct/CommittedResult.java    |   23 +-
 .../beam/runners/direct/CompletionCallback.java |    8 +
 .../beam/runners/direct/DirectGroupByKey.java   |    2 +-
 .../beam/runners/direct/DirectRegistrar.java    |    4 +-
 .../beam/runners/direct/DirectRunner.java       |   38 +-
 .../beam/runners/direct/EvaluationContext.java  |   57 +-
 .../beam/runners/direct/EvaluatorKey.java       |   55 -
 .../direct/ExecutorServiceParallelExecutor.java |  186 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   33 +-
 .../ImmutabilityCheckingBundleFactory.java      |    4 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   24 +-
 .../direct/ParDoMultiEvaluatorFactory.java      |   53 +-
 .../direct/ParDoSingleEvaluatorFactory.java     |   59 +-
 ...rializableCloningThreadLocalCacheLoader.java |   54 -
 .../runners/direct/ShardControlledWrite.java    |   81 -
 .../runners/direct/StepTransformResult.java     |   95 +-
 .../direct/TextIOShardedWriteFactory.java       |   78 -
 .../direct/TransformEvaluatorFactory.java       |   10 +-
 .../beam/runners/direct/TransformExecutor.java  |    3 +-
 .../beam/runners/direct/TransformResult.java    |   16 +-
 .../direct/UnboundedReadDeduplicator.java       |  102 +
 .../direct/UnboundedReadEvaluatorFactory.java   |   50 +-
 .../runners/direct/ViewEvaluatorFactory.java    |    9 +-
 .../beam/runners/direct/WatermarkManager.java   |    5 +-
 .../runners/direct/WindowEvaluatorFactory.java  |   18 +-
 .../direct/WriteWithShardingFactory.java        |  142 +
 .../beam/runners/direct/package-info.java       |   25 +
 .../runners/direct/AggregatorContainerTest.java |  134 +
 .../direct/AvroIOShardedWriteFactoryTest.java   |  120 -
 .../runners/direct/CloningThreadLocalTest.java  |   92 +
 .../runners/direct/CommittedResultTest.java     |   30 +-
 .../ConsumerTrackingPipelineVisitorTest.java    |   22 +-
 .../runners/direct/DirectRegistrarTest.java     |   17 +-
 .../beam/runners/direct/DirectRunnerTest.java   |   59 +-
 .../runners/direct/EvaluationContextTest.java   |   32 +-
 .../direct/GroupByKeyEvaluatorFactoryTest.java  |    2 +-
 .../GroupByKeyOnlyEvaluatorFactoryTest.java     |    2 +-
 .../ImmutabilityCheckingBundleFactoryTest.java  |    6 +-
 .../ImmutabilityEnforcementFactoryTest.java     |    6 +-
 .../direct/KeyedPValueTrackingVisitorTest.java  |    6 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   18 +-
 .../direct/ParDoMultiEvaluatorFactoryTest.java  |   41 +-
 .../direct/ParDoSingleEvaluatorFactoryTest.java |   41 +-
 ...izableCloningThreadLocalCacheLoaderTest.java |   99 -
 .../runners/direct/SideInputContainerTest.java  |    1 +
 .../runners/direct/StepTransformResultTest.java |   91 +
 .../direct/TextIOShardedWriteFactoryTest.java   |  120 -
 .../runners/direct/TransformExecutorTest.java   |   33 +-
 .../direct/UnboundedReadDeduplicatorTest.java   |  134 +
 .../UnboundedReadEvaluatorFactoryTest.java      |   50 +-
 .../runners/direct/WatermarkManagerTest.java    |   18 +-
 .../direct/WindowEvaluatorFactoryTest.java      |  178 +-
 .../direct/WriteWithShardingFactoryTest.java    |  285 +
 runners/flink/README.md                         |   25 +-
 runners/flink/examples/pom.xml                  |   27 +-
 .../beam/runners/flink/examples/TFIDF.java      |   63 +-
 .../beam/runners/flink/examples/WordCount.java  |   12 +-
 .../flink/examples/streaming/AutoComplete.java  |   29 +-
 .../flink/examples/streaming/JoinExamples.java  |   23 +-
 .../examples/streaming/KafkaIOExamples.java     |    4 +-
 .../KafkaWindowedWordCountExample.java          |    8 +-
 .../examples/streaming/WindowedWordCount.java   |    9 +-
 runners/flink/pom.xml                           |    8 +-
 runners/flink/runner/pom.xml                    |  130 +-
 .../FlinkPipelineExecutionEnvironment.java      |    6 +-
 .../apache/beam/runners/flink/FlinkRunner.java  |    2 +-
 .../beam/runners/flink/FlinkRunnerResult.java   |   22 +-
 .../FlinkBatchTransformTranslators.java         |   31 +-
 .../FlinkStreamingTransformTranslators.java     |   22 +-
 .../FlinkStreamingTranslationContext.java       |    8 +-
 .../functions/FlinkAssignContext.java           |   15 +-
 .../functions/FlinkDoFnFunction.java            |   10 +-
 .../FlinkMergingNonShuffleReduceFunction.java   |    8 +-
 .../functions/FlinkMultiOutputDoFnFunction.java |   10 +-
 .../FlinkMultiOutputProcessContext.java         |    6 +-
 .../functions/FlinkNoElementAssignContext.java  |   12 +-
 .../functions/FlinkPartialReduceFunction.java   |    8 +-
 .../functions/FlinkProcessContext.java          |   41 +-
 .../functions/FlinkReduceFunction.java          |    8 +-
 .../translation/types/CoderTypeInformation.java |    6 +-
 .../utils/SerializedPipelineOptions.java        |    8 +-
 .../streaming/FlinkAbstractParDoWrapper.java    |   34 +-
 .../FlinkGroupAlsoByWindowWrapper.java          |   64 +-
 .../streaming/FlinkParDoBoundMultiWrapper.java  |   12 +-
 .../streaming/FlinkParDoBoundWrapper.java       |    4 +-
 .../streaming/io/UnboundedFlinkSink.java        |    2 +-
 .../streaming/io/UnboundedFlinkSource.java      |   30 +-
 .../state/AbstractFlinkTimerInternals.java      |    4 +-
 .../streaming/state/FlinkStateInternals.java    |    7 +-
 .../beam/runners/flink/PipelineOptionsTest.java |    6 +-
 .../beam/runners/flink/ReadSourceITCase.java    |    4 +-
 .../flink/ReadSourceStreamingITCase.java        |    4 +-
 .../beam/runners/flink/WriteSinkITCase.java     |   14 +-
 .../flink/streaming/GroupAlsoByWindowTest.java  |    4 +-
 .../flink/streaming/GroupByNullKeyTest.java     |    8 +-
 .../flink/streaming/StateSerializationTest.java |   20 +
 .../streaming/TopWikipediaSessionsITCase.java   |    6 +-
 runners/google-cloud-dataflow-java/pom.xml      |   78 +-
 .../dataflow/BlockingDataflowRunner.java        |    8 +-
 .../runners/dataflow/DataflowPipelineJob.java   |   90 +-
 .../dataflow/DataflowPipelineTranslator.java    |   21 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  511 +-
 .../dataflow/internal/AssignWindows.java        |    6 +-
 .../DataflowUnboundedReadFromBoundedSource.java |  547 ++
 .../runners/dataflow/internal/IsmFormat.java    |    8 +-
 .../BlockingDataflowPipelineOptions.java        |   27 -
 .../options/DataflowPipelineDebugOptions.java   |   43 -
 .../options/DataflowPipelineOptions.java        |   46 +-
 .../options/DataflowWorkerLoggingOptions.java   |   14 +-
 .../runners/dataflow/options/package-info.java  |   22 +
 .../beam/runners/dataflow/package-info.java     |   22 +
 .../dataflow/testing/TestDataflowRunner.java    |   23 +-
 .../runners/dataflow/testing/package-info.java  |   24 +
 .../dataflow/util/DataflowPathValidator.java    |  100 -
 .../beam/runners/dataflow/util/DoFnInfo.java    |   16 +-
 .../beam/runners/dataflow/util/GcsStager.java   |    5 +-
 .../runners/dataflow/util/MonitoringUtil.java   |   70 +-
 .../runners/dataflow/util/package-info.java     |   20 +
 .../util/GroupAlsoByWindowViaWindowSetDoFn.java |   40 -
 .../dataflow/BlockingDataflowRunnerTest.java    |    9 +-
 .../dataflow/DataflowPipelineJobTest.java       |   43 +-
 .../DataflowPipelineTranslatorTest.java         |   26 +-
 .../runners/dataflow/DataflowRunnerTest.java    |  144 +-
 .../dataflow/RecordingPipelineVisitor.java      |   46 +
 .../runners/dataflow/io/DataflowAvroIOTest.java |   69 -
 .../dataflow/io/DataflowBigQueryIOTest.java     |   94 -
 .../dataflow/io/DataflowDatastoreIOTest.java    |   63 -
 .../dataflow/io/DataflowPubsubIOTest.java       |   63 -
 .../runners/dataflow/io/DataflowTextIOTest.java |   76 -
 .../options/DataflowPipelineOptionsTest.java    |   65 +
 .../testing/TestDataflowRunnerTest.java         |   46 +-
 .../transforms/DataflowCombineTest.java         |   58 -
 .../DataflowDisplayDataEvaluator.java           |   72 -
 .../transforms/DataflowGroupByKeyTest.java      |    2 +-
 .../transforms/DataflowMapElementsTest.java     |   55 -
 .../dataflow/transforms/DataflowViewTest.java   |    4 +-
 .../util/DataflowPathValidatorTest.java         |   94 -
 .../dataflow/util/MonitoringUtilTest.java       |   60 +
 runners/pom.xml                                 |   74 +-
 runners/spark/README.md                         |    8 +-
 runners/spark/pom.xml                           |  102 +-
 .../runners/spark/SparkPipelineOptions.java     |   13 +-
 .../beam/runners/spark/SparkPipelineRunner.java |  255 -
 .../apache/beam/runners/spark/SparkRunner.java  |  249 +
 .../runners/spark/SparkRunnerRegistrar.java     |   14 +-
 .../spark/SparkStreamingPipelineOptions.java    |   41 -
 .../runners/spark/TestSparkPipelineRunner.java  |   77 -
 .../beam/runners/spark/TestSparkRunner.java     |   75 +
 .../runners/spark/aggregators/package-info.java |   20 +
 .../beam/runners/spark/coders/package-info.java |   22 +
 .../beam/runners/spark/examples/WordCount.java  |  137 +
 .../beam/runners/spark/io/CreateStream.java     |    7 +-
 .../apache/beam/runners/spark/io/KafkaIO.java   |   23 +-
 .../beam/runners/spark/io/hadoop/HadoopIO.java  |   38 +-
 .../runners/spark/io/hadoop/package-info.java   |   22 +
 .../beam/runners/spark/io/package-info.java     |   22 +
 .../apache/beam/runners/spark/package-info.java |   22 +
 .../runners/spark/translation/DoFnFunction.java |    8 +-
 .../spark/translation/EvaluationContext.java    |   21 +
 .../spark/translation/MultiDoFnFunction.java    |    8 +-
 .../spark/translation/SparkContextFactory.java  |    8 +-
 .../translation/SparkPipelineEvaluator.java     |    6 +-
 .../spark/translation/SparkProcessContext.java  |   18 +-
 .../spark/translation/TransformTranslator.java  |   73 +-
 .../runners/spark/translation/package-info.java |   22 +
 .../streaming/StreamingTransformTranslator.java |    4 +-
 .../StreamingWindowPipelineDetector.java        |    6 +-
 .../translation/streaming/package-info.java     |   22 +
 .../beam/runners/spark/util/package-info.java   |   22 +
 .../apache/beam/runners/spark/DeDupTest.java    |    4 +-
 .../beam/runners/spark/EmptyInputTest.java      |    4 +-
 .../beam/runners/spark/SimpleWordCountTest.java |   87 +-
 .../runners/spark/SparkRunnerRegistrarTest.java |    4 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  203 +-
 .../beam/runners/spark/io/AvroPipelineTest.java |    9 +-
 .../beam/runners/spark/io/NumShardsTest.java    |    8 +-
 .../io/hadoop/HadoopFileFormatPipelineTest.java |    9 +-
 .../spark/translation/CombineGloballyTest.java  |    6 +-
 .../spark/translation/CombinePerKeyTest.java    |   13 +-
 .../spark/translation/DoFnOutputTest.java       |   14 +-
 .../translation/MultiOutputWordCountTest.java   |   17 +-
 .../spark/translation/SerializationTest.java    |   16 +-
 .../spark/translation/SideEffectsTest.java      |   14 +-
 .../translation/TransformTranslatorTest.java    |   45 +-
 .../translation/WindowedWordCountTest.java      |   20 +-
 .../streaming/FlattenStreamingTest.java         |   14 +-
 .../streaming/KafkaStreamingTest.java           |   23 +-
 .../streaming/SimpleStreamingWordCountTest.java |   20 +-
 .../streaming/utils/EmbeddedKafkaCluster.java   |    4 +-
 runners/spark/src/test/resources/person.avsc    |   25 +-
 runners/spark/src/test/resources/pg1112.txt     | 4853 ------------------
 runners/spark/src/test/resources/pg2264.txt     | 3667 -------------
 sdks/java/build-tools/pom.xml                   |    2 +-
 .../src/main/resources/beam/checkstyle.xml      |   19 +-
 .../src/main/resources/beam/findbugs-filter.xml |  142 +
 .../src/main/resources/beam/suppressions.xml    |   24 +
 sdks/java/core/pom.xml                          |   61 +-
 .../main/java/org/apache/beam/sdk/Pipeline.java |    9 +-
 .../org/apache/beam/sdk/PipelineResult.java     |   38 +
 .../org/apache/beam/sdk/coders/AvroCoder.java   |   21 +-
 .../java/org/apache/beam/sdk/coders/Coder.java  |   12 +-
 .../apache/beam/sdk/coders/CoderRegistry.java   |   43 +-
 .../apache/beam/sdk/coders/CollectionCoder.java |    7 +-
 .../apache/beam/sdk/coders/DelegateCoder.java   |   26 +-
 .../apache/beam/sdk/coders/DurationCoder.java   |    1 -
 .../org/apache/beam/sdk/coders/EntityCoder.java |   87 -
 .../apache/beam/sdk/coders/InstantCoder.java    |   34 +-
 .../apache/beam/sdk/coders/IterableCoder.java   |    7 +-
 .../beam/sdk/coders/IterableLikeCoder.java      |   10 +-
 .../org/apache/beam/sdk/coders/JAXBCoder.java   |   28 +-
 .../org/apache/beam/sdk/coders/KvCoder.java     |    7 +-
 .../org/apache/beam/sdk/coders/ListCoder.java   |    7 +-
 .../org/apache/beam/sdk/coders/MapCoder.java    |    6 +-
 .../apache/beam/sdk/coders/NullableCoder.java   |    9 +-
 .../beam/sdk/coders/SerializableCoder.java      |    6 +-
 .../org/apache/beam/sdk/coders/SetCoder.java    |    7 +-
 .../beam/sdk/coders/StringDelegateCoder.java    |   51 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |   74 +-
 .../java/org/apache/beam/sdk/io/AvroSource.java |   14 +-
 .../java/org/apache/beam/sdk/io/BigQueryIO.java | 2493 ---------
 .../sdk/io/BoundedReadFromUnboundedSource.java  |    6 +-
 .../apache/beam/sdk/io/CompressedSource.java    |   81 +-
 .../org/apache/beam/sdk/io/DatastoreIO.java     |  988 ----
 .../org/apache/beam/sdk/io/FileBasedSink.java   |   22 +-
 .../apache/beam/sdk/io/OffsetBasedSource.java   |   44 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |   49 +-
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |   37 +-
 .../beam/sdk/io/PubsubUnboundedSource.java      |    8 +-
 .../main/java/org/apache/beam/sdk/io/Read.java  |   29 +-
 .../java/org/apache/beam/sdk/io/Source.java     |    2 +-
 .../java/org/apache/beam/sdk/io/TextIO.java     |   87 +-
 .../org/apache/beam/sdk/io/UnboundedSource.java |   28 +-
 .../main/java/org/apache/beam/sdk/io/Write.java |  332 +-
 .../java/org/apache/beam/sdk/io/XmlSink.java    |   10 +-
 .../java/org/apache/beam/sdk/io/XmlSource.java  |   11 +-
 .../org/apache/beam/sdk/io/package-info.java    |    9 +-
 .../beam/sdk/io/range/ByteKeyRangeTracker.java  |   63 +-
 .../beam/sdk/io/range/OffsetRangeTracker.java   |   13 +-
 .../org/apache/beam/sdk/options/GcpOptions.java |   37 +-
 .../org/apache/beam/sdk/options/GcsOptions.java |   44 +
 .../beam/sdk/options/PipelineOptions.java       |    3 +-
 .../sdk/options/PipelineOptionsFactory.java     |   41 +-
 .../sdk/options/PipelineOptionsReflector.java   |    1 +
 .../sdk/options/PipelineOptionsValidator.java   |   18 +-
 .../sdk/options/ProxyInvocationHandler.java     |   15 +-
 .../beam/sdk/runners/AggregatorValues.java      |    4 +-
 .../apache/beam/sdk/runners/PipelineRunner.java |    6 +-
 .../sdk/runners/RecordingPipelineVisitor.java   |   47 -
 .../beam/sdk/runners/TransformHierarchy.java    |   10 +-
 .../beam/sdk/runners/TransformTreeNode.java     |   13 +-
 .../beam/sdk/testing/CoderProperties.java       |   85 +-
 .../org/apache/beam/sdk/testing/PAssert.java    |  532 +-
 .../apache/beam/sdk/testing/PaneExtractors.java |  140 +
 .../beam/sdk/testing/SerializableMatchers.java  |   27 +-
 .../apache/beam/sdk/testing/StaticWindows.java  |  110 +
 .../apache/beam/sdk/testing/TestPipeline.java   |    4 +-
 .../beam/sdk/testing/TestPipelineOptions.java   |    1 +
 .../beam/sdk/testing/WindowFnTestUtils.java     |    5 +-
 .../apache/beam/sdk/testing/WindowSupplier.java |   83 +
 .../apache/beam/sdk/transforms/Aggregator.java  |   32 +-
 .../sdk/transforms/AggregatorRetriever.java     |    6 +-
 .../sdk/transforms/ApproximateQuantiles.java    |    9 +-
 .../org/apache/beam/sdk/transforms/Combine.java |  238 +-
 .../apache/beam/sdk/transforms/CombineFns.java  |   10 +-
 .../org/apache/beam/sdk/transforms/Count.java   |    4 +-
 .../org/apache/beam/sdk/transforms/Create.java  |    7 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |  418 +-
 .../beam/sdk/transforms/DoFnReflector.java      |  807 ++-
 .../apache/beam/sdk/transforms/DoFnTester.java  |  202 +-
 .../beam/sdk/transforms/DoFnWithContext.java    |  429 --
 .../org/apache/beam/sdk/transforms/Filter.java  |    5 +-
 .../beam/sdk/transforms/FlatMapElements.java    |  126 +-
 .../org/apache/beam/sdk/transforms/Flatten.java |    4 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |    2 +-
 .../transforms/IntraBundleParallelization.java  |   49 +-
 .../org/apache/beam/sdk/transforms/Keys.java    |   13 +-
 .../org/apache/beam/sdk/transforms/KvSwap.java  |   15 +-
 .../apache/beam/sdk/transforms/MapElements.java |   60 +-
 .../org/apache/beam/sdk/transforms/Max.java     |   42 +-
 .../org/apache/beam/sdk/transforms/Min.java     |   45 +-
 .../org/apache/beam/sdk/transforms/OldDoFn.java |  567 ++
 .../apache/beam/sdk/transforms/PTransform.java  |   29 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   |  287 +-
 .../apache/beam/sdk/transforms/Partition.java   |    2 +-
 .../beam/sdk/transforms/RemoveDuplicates.java   |   13 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |   10 +-
 .../beam/sdk/transforms/SimpleFunction.java     |   42 +-
 .../org/apache/beam/sdk/transforms/Top.java     |    7 +-
 .../org/apache/beam/sdk/transforms/Values.java  |   15 +-
 .../apache/beam/sdk/transforms/WithKeys.java    |   15 +-
 .../beam/sdk/transforms/WithTimestamps.java     |    4 +-
 .../sdk/transforms/display/DisplayData.java     |    7 +-
 .../beam/sdk/transforms/join/CoGbkResult.java   |    9 +-
 .../beam/sdk/transforms/join/CoGroupByKey.java  |   16 +-
 .../beam/sdk/transforms/windowing/AfterAll.java |    6 +-
 .../windowing/AfterDelayFromFirstElement.java   |    6 +
 .../sdk/transforms/windowing/AfterEach.java     |    1 +
 .../sdk/transforms/windowing/AfterFirst.java    |    9 +-
 .../sdk/transforms/windowing/AfterPane.java     |    4 +
 .../windowing/AfterProcessingTime.java          |    6 +-
 .../transforms/windowing/AfterWatermark.java    |   38 +-
 .../sdk/transforms/windowing/GlobalWindows.java |    5 -
 .../transforms/windowing/IntervalWindow.java    |    1 -
 .../beam/sdk/transforms/windowing/Never.java    |    1 +
 .../beam/sdk/transforms/windowing/PaneInfo.java |   27 +-
 .../windowing/PartitioningWindowFn.java         |    5 -
 .../beam/sdk/transforms/windowing/Trigger.java  |    7 +-
 .../transforms/windowing/TriggerBuilder.java    |   29 -
 .../beam/sdk/transforms/windowing/Window.java   |   97 +-
 .../beam/sdk/transforms/windowing/WindowFn.java |   11 +-
 .../org/apache/beam/sdk/util/AssignWindows.java |   46 -
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |   75 -
 ...AttemptAndTimeBoundedExponentialBackOff.java |   15 +-
 .../util/AttemptBoundedExponentialBackOff.java  |    9 +-
 .../org/apache/beam/sdk/util/AvroUtils.java     |  207 -
 .../beam/sdk/util/BaseExecutionContext.java     |    4 +-
 .../beam/sdk/util/BatchTimerInternals.java      |  140 -
 .../apache/beam/sdk/util/BigQueryServices.java  |  165 -
 .../beam/sdk/util/BigQueryServicesImpl.java     |  478 --
 .../beam/sdk/util/BigQueryTableInserter.java    |  459 --
 .../beam/sdk/util/BigQueryTableRowIterator.java |  472 --
 .../apache/beam/sdk/util/BucketingFunction.java |    1 +
 .../org/apache/beam/sdk/util/CoderUtils.java    |    4 +-
 .../beam/sdk/util/CombineContextFactory.java    |    6 +-
 .../apache/beam/sdk/util/CounterAggregator.java |   35 +-
 .../org/apache/beam/sdk/util/Credentials.java   |    5 +-
 .../org/apache/beam/sdk/util/DoFnRunner.java    |   62 -
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  558 --
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  144 -
 .../apache/beam/sdk/util/ExecutableTrigger.java |   11 +-
 .../apache/beam/sdk/util/ExecutionContext.java  |    8 +-
 .../sdk/util/ExposedByteArrayInputStream.java   |    3 +
 .../sdk/util/ExposedByteArrayOutputStream.java  |    4 +
 .../beam/sdk/util/FileIOChannelFactory.java     |   34 +-
 .../apache/beam/sdk/util/GatherAllPanes.java    |   16 +-
 .../apache/beam/sdk/util/GcsPathValidator.java  |   97 +
 .../java/org/apache/beam/sdk/util/GcsUtil.java  |   10 +-
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   59 -
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |  100 -
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |  247 -
 .../apache/beam/sdk/util/IOChannelUtils.java    |   26 +-
 .../apache/beam/sdk/util/IdentityWindowFn.java  |   20 +-
 .../apache/beam/sdk/util/InstanceBuilder.java   |   21 +-
 .../util/IntervalBoundedExponentialBackOff.java |   10 +-
 .../sdk/util/LateDataDroppingDoFnRunner.java    |  147 -
 .../beam/sdk/util/MergingActiveWindowSet.java   |   35 +-
 .../apache/beam/sdk/util/MovingFunction.java    |    1 +
 .../org/apache/beam/sdk/util/NonEmptyPanes.java |  150 -
 .../apache/beam/sdk/util/PCollectionViews.java  |    4 +-
 .../apache/beam/sdk/util/PaneInfoTracker.java   |  154 -
 .../beam/sdk/util/PerKeyCombineFnRunner.java    |   44 +-
 .../beam/sdk/util/PerKeyCombineFnRunners.java   |   30 +-
 .../org/apache/beam/sdk/util/PubsubClient.java  |    1 +
 .../apache/beam/sdk/util/PubsubTestClient.java  |    1 +
 .../sdk/util/PushbackSideInputDoFnRunner.java   |  115 -
 .../java/org/apache/beam/sdk/util/ReduceFn.java |  130 -
 .../beam/sdk/util/ReduceFnContextFactory.java   |  497 --
 .../apache/beam/sdk/util/ReduceFnRunner.java    |  985 ----
 .../sdk/util/ReifyTimestampAndWindowsDoFn.java  |    6 +-
 .../sdk/util/ReifyTimestampsAndWindows.java     |   63 +
 .../org/apache/beam/sdk/util/ReleaseInfo.java   |   12 +-
 .../org/apache/beam/sdk/util/Reshuffle.java     |   10 +-
 .../apache/beam/sdk/util/SerializableUtils.java |   15 +-
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |   56 -
 .../org/apache/beam/sdk/util/StringUtils.java   |   14 +-
 .../beam/sdk/util/SystemDoFnInternal.java       |    6 +-
 .../apache/beam/sdk/util/SystemReduceFn.java    |  135 -
 .../org/apache/beam/sdk/util/TimeDomain.java    |    2 +-
 .../apache/beam/sdk/util/TimerInternals.java    |    6 +-
 .../beam/sdk/util/TriggerContextFactory.java    |   16 +-
 .../org/apache/beam/sdk/util/TriggerRunner.java |  234 -
 .../apache/beam/sdk/util/ValueWithRecordId.java |   27 +-
 .../org/apache/beam/sdk/util/WatermarkHold.java |  536 --
 .../org/apache/beam/sdk/util/WindowedValue.java |  205 +-
 .../beam/sdk/util/WindowingInternals.java       |    4 +-
 .../apache/beam/sdk/util/WindowingStrategy.java |    6 +-
 .../java/org/apache/beam/sdk/util/ZipFiles.java |   11 +
 .../apache/beam/sdk/util/common/Counter.java    |    2 +-
 .../util/common/ElementByteSizeObservable.java  |   42 -
 .../util/common/ElementByteSizeObserver.java    |   18 +-
 .../beam/sdk/util/common/PeekingReiterator.java |   99 -
 .../beam/sdk/util/common/ReflectHelpers.java    |   23 +-
 .../beam/sdk/util/common/package-info.java      |    3 +-
 .../org/apache/beam/sdk/util/gcsfs/GcsPath.java |   51 +-
 .../beam/sdk/util/gcsfs/package-info.java       |    3 +-
 .../org/apache/beam/sdk/util/package-info.java  |    3 +-
 .../CopyOnAccessInMemoryStateInternals.java     |    4 +-
 .../sdk/util/state/StateInternalsFactory.java   |   36 +
 .../beam/sdk/util/state/StateMerging.java       |   24 +-
 .../beam/sdk/util/state/StateNamespaces.java    |    2 +-
 .../apache/beam/sdk/util/state/StateTable.java  |   12 +-
 .../apache/beam/sdk/util/state/StateTags.java   |    6 +-
 .../beam/sdk/util/state/package-info.java       |   22 +
 .../org/apache/beam/sdk/values/PCollection.java |   34 +-
 .../apache/beam/sdk/values/PCollectionList.java |    2 +-
 .../org/apache/beam/sdk/values/POutput.java     |    2 +-
 .../beam/sdk/values/TimestampedValue.java       |    1 -
 .../org/apache/beam/sdk/values/TupleTag.java    |    2 +-
 .../apache/beam/sdk/values/TypeDescriptors.java |   13 +
 .../dataflow/util/GcsPathValidatorTest.java     |  104 +
 .../java/org/apache/beam/sdk/PipelineTest.java  |    2 +-
 .../org/apache/beam/sdk/WindowMatchers.java     |   80 +-
 .../org/apache/beam/sdk/WindowMatchersTest.java |   84 +
 .../apache/beam/sdk/coders/AvroCoderTest.java   |   54 +-
 .../beam/sdk/coders/BigDecimalCoderTest.java    |   15 +-
 .../beam/sdk/coders/BigIntegerCoderTest.java    |   14 +-
 .../beam/sdk/coders/ByteArrayCoderTest.java     |    5 +-
 .../beam/sdk/coders/CoderRegistryTest.java      |   22 +-
 .../beam/sdk/coders/DelegateCoderTest.java      |   43 +
 .../apache/beam/sdk/coders/EntityCoderTest.java |  110 -
 .../apache/beam/sdk/coders/JAXBCoderTest.java   |   69 +-
 .../beam/sdk/coders/NullableCoderTest.java      |   26 +-
 .../beam/sdk/coders/SerializableCoderTest.java  |   15 +-
 .../sdk/coders/protobuf/ProtobufUtilTest.java   |    7 +-
 .../beam/sdk/io/AvroIOGeneratedClassTest.java   |  192 +-
 .../java/org/apache/beam/sdk/io/AvroIOTest.java |   53 +-
 .../org/apache/beam/sdk/io/AvroSourceTest.java  |   20 +-
 .../org/apache/beam/sdk/io/BigQueryIOTest.java  | 1128 ----
 .../io/BoundedReadFromUnboundedSourceTest.java  |    1 +
 .../beam/sdk/io/CompressedSourceTest.java       |   86 +
 .../apache/beam/sdk/io/CountingInputTest.java   |    3 +-
 .../apache/beam/sdk/io/CountingSourceTest.java  |    2 +-
 .../org/apache/beam/sdk/io/DatastoreIOTest.java |  621 ---
 .../apache/beam/sdk/io/FileBasedSourceTest.java |    5 +-
 .../beam/sdk/io/OffsetBasedSourceTest.java      |    6 +-
 .../org/apache/beam/sdk/io/PubsubIOTest.java    |   34 +-
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |    2 +-
 .../java/org/apache/beam/sdk/io/ReadTest.java   |    1 +
 .../java/org/apache/beam/sdk/io/TextIOTest.java |  279 +-
 .../java/org/apache/beam/sdk/io/WriteTest.java  |  152 +-
 .../org/apache/beam/sdk/io/XmlSinkTest.java     |    1 +
 .../org/apache/beam/sdk/io/XmlSourceTest.java   |   19 +-
 .../sdk/io/range/ByteKeyRangeTrackerTest.java   |   58 +-
 .../sdk/io/range/OffsetRangeTrackerTest.java    |   91 +-
 .../apache/beam/sdk/options/GcpOptionsTest.java |   26 +
 .../sdk/options/GoogleApiDebugOptionsTest.java  |    1 -
 .../sdk/options/PipelineOptionsFactoryTest.java |    1 -
 .../beam/sdk/options/PipelineOptionsTest.java   |    1 -
 .../sdk/options/ProxyInvocationHandlerTest.java |    2 +-
 .../AggregatorPipelineExtractorTest.java        |    6 +-
 .../beam/sdk/runners/TransformTreeTest.java     |    6 +-
 .../beam/sdk/testing/CoderPropertiesTest.java   |   26 +
 .../sdk/testing/DataflowJUnitTestRunner.java    |  130 -
 .../apache/beam/sdk/testing/PAssertTest.java    |  117 +-
 .../beam/sdk/testing/PaneExtractorsTest.java    |  323 ++
 .../beam/sdk/testing/StaticWindowsTest.java     |   94 +
 .../beam/sdk/testing/TestPipelineTest.java      |    1 -
 .../beam/sdk/testing/WindowSupplierTest.java    |   89 +
 .../transforms/ApproximateQuantilesTest.java    |    1 +
 .../sdk/transforms/ApproximateUniqueTest.java   |    3 +-
 .../beam/sdk/transforms/CombineFnsTest.java     |    4 +-
 .../apache/beam/sdk/transforms/CombineTest.java |   46 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |    6 +-
 .../beam/sdk/transforms/DoFnContextTest.java    |   69 -
 .../DoFnDelegatingAggregatorTest.java           |   16 +-
 .../beam/sdk/transforms/DoFnReflectorTest.java  |  340 +-
 .../apache/beam/sdk/transforms/DoFnTest.java    |   96 +-
 .../beam/sdk/transforms/DoFnTesterTest.java     |   78 +-
 .../sdk/transforms/DoFnWithContextTest.java     |  237 -
 .../sdk/transforms/FlatMapElementsTest.java     |   48 +
 .../apache/beam/sdk/transforms/FlattenTest.java |    4 +-
 .../beam/sdk/transforms/GroupByKeyTest.java     |    6 +-
 .../IntraBundleParallelizationTest.java         |   23 +-
 .../beam/sdk/transforms/MapElementsTest.java    |  107 +
 .../org/apache/beam/sdk/transforms/MaxTest.java |    1 +
 .../org/apache/beam/sdk/transforms/MinTest.java |    2 +
 .../apache/beam/sdk/transforms/NoOpDoFn.java    |  144 -
 .../apache/beam/sdk/transforms/NoOpOldDoFn.java |  144 +
 .../beam/sdk/transforms/OldDoFnContextTest.java |   69 +
 .../apache/beam/sdk/transforms/OldDoFnTest.java |  242 +
 .../beam/sdk/transforms/PTransformTest.java     |    7 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   |  136 +-
 .../beam/sdk/transforms/PartitionTest.java      |    1 +
 .../apache/beam/sdk/transforms/SampleTest.java  |    7 +-
 .../org/apache/beam/sdk/transforms/SumTest.java |   33 +
 .../org/apache/beam/sdk/transforms/TopTest.java |    1 +
 .../apache/beam/sdk/transforms/ViewTest.java    |  400 +-
 .../beam/sdk/transforms/WithTimestampsTest.java |    4 +-
 .../display/DisplayDataEvaluator.java           |   41 +-
 .../display/DisplayDataEvaluatorTest.java       |   18 +-
 .../transforms/display/DisplayDataMatchers.java |   22 +
 .../display/DisplayDataMatchersTest.java        |   17 +-
 .../sdk/transforms/display/DisplayDataTest.java |   21 +-
 .../dofnreflector/DoFnReflectorTestHelper.java  |  116 +
 .../sdk/transforms/join/CoGroupByKeyTest.java   |   26 +-
 .../windowing/AfterProcessingTimeTest.java      |    3 +-
 .../windowing/AfterWatermarkTest.java           |   24 +-
 .../sdk/transforms/windowing/NeverTest.java     |    1 +
 .../sdk/transforms/windowing/WindowTest.java    |   10 +-
 .../sdk/transforms/windowing/WindowingTest.java |   27 +-
 .../apache/beam/sdk/util/ApiSurfaceTest.java    |    4 +-
 .../org/apache/beam/sdk/util/AvroUtilsTest.java |   78 -
 .../beam/sdk/util/BatchTimerInternalsTest.java  |  118 -
 .../beam/sdk/util/BigQueryServicesImplTest.java |  303 --
 .../sdk/util/BigQueryTableInserterTest.java     |  306 --
 .../sdk/util/BigQueryTableRowIteratorTest.java  |  256 -
 .../apache/beam/sdk/util/BigQueryUtilTest.java  |  481 --
 .../beam/sdk/util/BucketingFunctionTest.java    |    4 +-
 .../beam/sdk/util/FileIOChannelFactoryTest.java |   26 +-
 .../beam/sdk/util/GatherAllPanesTest.java       |   16 +-
 .../sdk/util/GroupAlsoByWindowsProperties.java  |  619 ---
 ...oupAlsoByWindowsViaOutputBufferDoFnTest.java |  106 -
 .../beam/sdk/util/IOChannelUtilsTest.java       |   18 +-
 .../sdk/util/IdentitySideInputWindowFn.java     |    3 +-
 .../util/LateDataDroppingDoFnRunnerTest.java    |  117 -
 .../sdk/util/MergingActiveWindowSetTest.java    |    6 +-
 .../beam/sdk/util/MovingFunctionTest.java       |    4 +-
 .../util/PushbackSideInputDoFnRunnerTest.java   |  234 -
 .../beam/sdk/util/ReduceFnRunnerTest.java       | 1448 ------
 .../apache/beam/sdk/util/ReduceFnTester.java    |  784 ---
 .../util/RetryHttpRequestInitializerTest.java   |   19 +-
 .../beam/sdk/util/SerializableUtilsTest.java    |    1 -
 .../apache/beam/sdk/util/SerializerTest.java    |    1 -
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   86 -
 .../apache/beam/sdk/util/StringUtilsTest.java   |   35 +-
 .../org/apache/beam/sdk/util/TriggerTester.java |   31 +-
 .../beam/sdk/util/common/CounterTest.java       |    1 +
 .../beam/sdk/util/common/CounterTestUtils.java  |   57 -
 .../beam/sdk/util/state/StateTagTest.java       |    5 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |    2 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   20 +-
 sdks/java/extensions/join-library/pom.xml       |   11 +-
 .../beam/sdk/extensions/joinlibrary/Join.java   |   25 +-
 .../extensions/joinlibrary/package-info.java    |   23 +
 sdks/java/extensions/pom.xml                    |    2 +-
 sdks/java/io/google-cloud-platform/pom.xml      |  151 +-
 .../sdk/io/gcp/bigquery/BigQueryAvroUtils.java  |  236 +
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 2750 ++++++++++
 .../sdk/io/gcp/bigquery/BigQueryServices.java   |  191 +
 .../io/gcp/bigquery/BigQueryServicesImpl.java   |  722 +++
 .../io/gcp/bigquery/BigQueryTableInserter.java  |  221 +
 .../gcp/bigquery/BigQueryTableRowIterator.java  |  474 ++
 .../beam/sdk/io/gcp/bigquery/package-info.java  |   24 +
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    |  397 +-
 .../sdk/io/gcp/bigtable/BigtableService.java    |   10 +-
 .../io/gcp/bigtable/BigtableServiceImpl.java    |   68 +-
 .../io/gcp/bigtable/BigtableTestOptions.java    |   37 +
 .../beam/sdk/io/gcp/datastore/DatastoreIO.java  |   41 +
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      |  969 ++++
 .../beam/sdk/io/gcp/datastore/package-info.java |   24 +
 .../io/gcp/bigquery/BigQueryAvroUtilsTest.java  |  143 +
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 1438 ++++++
 .../gcp/bigquery/BigQueryServicesImplTest.java  |  359 ++
 .../gcp/bigquery/BigQueryTableInserterTest.java |  249 +
 .../bigquery/BigQueryTableRowIteratorTest.java  |  256 +
 .../sdk/io/gcp/bigquery/BigQueryUtilTest.java   |  454 ++
 .../sdk/io/gcp/bigtable/BigtableIOTest.java     |  170 +-
 .../sdk/io/gcp/bigtable/BigtableReadIT.java     |   60 +
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |  214 +
 .../sdk/io/gcp/datastore/V1Beta3ReadIT.java     |  114 +
 .../beam/sdk/io/gcp/datastore/V1Beta3Test.java  |  587 +++
 .../io/gcp/datastore/V1Beta3TestOptions.java    |   44 +
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |  382 ++
 .../sdk/io/gcp/datastore/V1Beta3WriteIT.java    |   85 +
 sdks/java/io/hdfs/pom.xml                       |   64 +-
 .../beam/sdk/io/hdfs/AvroHDFSFileSource.java    |  145 +
 .../beam/sdk/io/hdfs/AvroWrapperCoder.java      |  116 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSink.java   |  277 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSource.java |   63 +-
 .../apache/beam/sdk/io/hdfs/WritableCoder.java  |    9 +-
 .../apache/beam/sdk/io/hdfs/package-info.java   |   22 +
 .../SimpleAuthAvroHDFSFileSource.java           |   84 +
 .../hdfs/simpleauth/SimpleAuthHDFSFileSink.java |  132 +
 .../simpleauth/SimpleAuthHDFSFileSource.java    |  122 +
 .../sdk/io/hdfs/simpleauth/package-info.java    |   22 +
 .../beam/sdk/io/hdfs/AvroWrapperCoderTest.java  |   52 +
 .../beam/sdk/io/hdfs/WritableCoderTest.java     |    9 +
 sdks/java/io/jms/pom.xml                        |  134 +
 .../beam/sdk/io/jms/JmsCheckpointMark.java      |   82 +
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |  516 ++
 .../org/apache/beam/sdk/io/jms/JmsRecord.java   |  153 +
 .../apache/beam/sdk/io/jms/package-info.java    |   22 +
 .../org/apache/beam/sdk/io/jms/JmsIOTest.java   |  145 +
 sdks/java/io/kafka/pom.xml                      |   36 +-
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |   24 +-
 .../apache/beam/sdk/io/kafka/package-info.java  |   22 +
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   |   16 +-
 sdks/java/io/pom.xml                            |    3 +-
 sdks/java/java8tests/pom.xml                    |   17 +-
 .../transforms/RemoveDuplicatesJava8Test.java   |   10 +-
 .../beam/sdk/transforms/WithKeysJava8Test.java  |   11 +-
 .../sdk/transforms/WithTimestampsJava8Test.java |    4 +-
 sdks/java/maven-archetypes/examples/pom.xml     |    2 +-
 .../main/resources/archetype-resources/pom.xml  |    7 +
 .../src/main/java/DebuggingWordCount.java       |    6 +-
 .../src/main/java/MinimalWordCount.java         |    6 +-
 .../src/main/java/WindowedWordCount.java        |    8 +-
 .../src/main/java/WordCount.java                |   10 +-
 .../main/java/common/DataflowExampleUtils.java  |    5 +-
 .../main/java/common/PubsubFileInjector.java    |    4 +-
 .../projects/basic/archetype.properties         |    2 +-
 sdks/java/maven-archetypes/pom.xml              |    2 +-
 sdks/java/maven-archetypes/starter/pom.xml      |    8 +-
 .../src/main/java/StarterPipeline.java          |    6 +-
 .../projects/basic/archetype.properties         |    2 +-
 .../resources/projects/basic/reference/pom.xml  |    2 +-
 .../src/main/java/it/pkg/StarterPipeline.java   |    6 +-
 sdks/java/microbenchmarks/README.md             |   42 +
 sdks/java/microbenchmarks/pom.xml               |  110 +
 .../coders/AvroCoderBenchmark.java              |  121 +
 .../coders/ByteArrayCoderBenchmark.java         |   66 +
 .../coders/CoderBenchmarking.java               |   42 +
 .../coders/StringUtf8CoderBenchmark.java        |   72 +
 .../microbenchmarks/coders/package-info.java    |   22 +
 .../transforms/DoFnReflectorBenchmark.java      |  243 +
 .../transforms/package-info.java                |   22 +
 sdks/java/pom.xml                               |    3 +-
 sdks/pom.xml                                    |    2 +-
 699 files changed, 38013 insertions(+), 35273 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/c3988118/.travis.yml
----------------------------------------------------------------------
diff --cc .travis.yml
index 3ff4834,45bad9a..8d12668
--- a/.travis.yml
+++ b/.travis.yml
@@@ -28,44 -28,28 +28,44 @@@ notifications
      on_success: change
      on_failure: always
  
 +addons:
 +  apt:
 +    packages:
 +    - python2.7
 +
  matrix:
    include:
 -    # On OSX, run with default JDK only.
 +    # TODO(altay): Re-enable Java tests before merging python-sdk branch to master.
 +    ## On OSX, run with default JDK only.
 +    #- os: osx
 +    #  env: MAVEN_OVERRIDE=""
 +    ## On Linux, run with specific JDKs only.
 +    #- os: linux
-     #  env: CUSTOM_JDK="oraclejdk8" MAVEN_OVERRIDE="-DforkCount=0"
++    #  env: CUSTOM_JDK="oraclejdk8" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 +    #- os: linux
-     #  env: CUSTOM_JDK="oraclejdk7" MAVEN_OVERRIDE="-DforkCount=0"
++    #  env: CUSTOM_JDK="oraclejdk7" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 +    #- os: linux
-     #  env: CUSTOM_JDK="openjdk7" MAVEN_OVERRIDE="-DforkCount=0"
++    #  env: CUSTOM_JDK="openjdk7" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 +    # Python SDK environments.
      - os: osx
 -      env: MAVEN_OVERRIDE=""
 -    # On Linux, run with specific JDKs only.
 -    - os: linux
 -      env: CUSTOM_JDK="oraclejdk8" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 -    - os: linux
 -      env: CUSTOM_JDK="oraclejdk7" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 +      env: TEST_PYTHON="1"
      - os: linux
 -      env: CUSTOM_JDK="openjdk7" MAVEN_OVERRIDE="-DbeamSurefireArgline='-Xmx512m'"
 +      env: TEST_PYTHON="1"
  
  before_install:
 -  - echo "MAVEN_OPTS='-Xmx1024m -XX:MaxPermSize=512m'" > ~/.mavenrc
 +  - echo "MAVEN_OPTS='-Xmx2048m -XX:MaxPermSize=512m'" > ~/.mavenrc
    - if [ "$TRAVIS_OS_NAME" == "osx" ]; then export JAVA_HOME=$(/usr/libexec/java_home); fi
    - if [ "$TRAVIS_OS_NAME" == "linux" ]; then jdk_switcher use "$CUSTOM_JDK"; fi
 -  - export BEAM_SUREFIRE_ARGLINE="-Xmx512m"
 +  # Python SDK environment settings.
 +  - export TOX_ENV=py27
 +  - if [ "$TRAVIS_OS_NAME" == "osx" ]; then export TOX_HOME=$HOME/Library/Python/2.7/bin; fi
 +  - if [ "$TRAVIS_OS_NAME" == "linux" ]; then export TOX_HOME=$HOME/.local/bin; fi
  
  install:
 -  - travis_retry mvn -B install clean -U -DskipTests=true
 +  - if [ ! "$TEST_PYTHON" ]; then travis_retry mvn -B install clean -U -DskipTests=true; fi
 +  - if [ "$TEST_PYTHON" ]; then travis_retry pip install tox --user `whoami`; fi
  
  script:
 -  - travis_retry mvn -B $MAVEN_OVERRIDE install -U
 -  - travis_retry testing/travis/test_wordcount.sh
 +  - if [ "$TEST_PYTHON" ]; then travis_retry $TOX_HOME/tox -e $TOX_ENV -c sdks/python/tox.ini; fi
 +  - if [ ! "$TEST_PYTHON" ]; then travis_retry mvn -B $MAVEN_OVERRIDE install -U; fi
 +  - if [ ! "$TEST_PYTHON" ]; then travis_retry testing/travis/test_wordcount.sh; fi



[48/51] [abbrv] incubator-beam git commit: This closes #757

Posted by ke...@apache.org.
This closes #757


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/2b5c6bcb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/2b5c6bcb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/2b5c6bcb

Branch: refs/heads/python-sdk
Commit: 2b5c6bcb2173d55f6b45f4134cd500d9cc9c1965
Parents: 8daf518 4ac5caf
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Aug 5 10:09:25 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Fri Aug 5 10:09:25 2016 -0700

----------------------------------------------------------------------
 .../beam/sdk/transforms/FlatMapElements.java    | 126 +++++++++++++------
 .../apache/beam/sdk/transforms/MapElements.java |  60 +++++----
 .../beam/sdk/transforms/SimpleFunction.java     |  34 +++++
 .../sdk/transforms/FlatMapElementsTest.java     |  48 +++++++
 .../beam/sdk/transforms/MapElementsTest.java    |  84 +++++++++++++
 5 files changed, 288 insertions(+), 64 deletions(-)
----------------------------------------------------------------------



[34/51] [abbrv] incubator-beam git commit: This closes #711

Posted by ke...@apache.org.
This closes #711


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/fcf6b1d3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/fcf6b1d3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/fcf6b1d3

Branch: refs/heads/python-sdk
Commit: fcf6b1d34e8b9da284e9857fecced2deb5a5ab1d
Parents: 74c5e5e b9543b9
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 12:49:08 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 12:49:08 2016 -0700

----------------------------------------------------------------------
 runners/spark/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[47/51] [abbrv] incubator-beam git commit: Remove unnecessary Assignment in TransformExecutor

Posted by ke...@apache.org.
Remove unnecessary Assignment in TransformExecutor


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/acf71d31
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/acf71d31
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/acf71d31

Branch: refs/heads/python-sdk
Commit: acf71d313a43c8f39213ce19277ffebadcc40a77
Parents: 4546fd9
Author: Thomas Groh <tg...@google.com>
Authored: Fri Aug 5 09:59:46 2016 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Aug 5 10:04:21 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/runners/direct/TransformExecutor.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/acf71d31/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java
index 3db941d..d873bf5 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java
@@ -118,7 +118,7 @@ class TransformExecutor<T> implements Runnable {
 
       processElements(evaluator, enforcements);
 
-      TransformResult result = finishBundle(evaluator, enforcements);
+      finishBundle(evaluator, enforcements);
     } catch (Throwable t) {
       onComplete.handleThrowable(inputBundle, t);
       if (t instanceof RuntimeException) {


[27/51] [abbrv] incubator-beam git commit: The new DoFn is not @Experimental

Posted by ke...@apache.org.
The new DoFn is not @Experimental


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/e73d163d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/e73d163d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/e73d163d

Branch: refs/heads/python-sdk
Commit: e73d163d5a220e91e88da4055bc45c18bfc9e930
Parents: b8d7559
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 10:01:28 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:47:31 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java    | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e73d163d/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
index eb6753c..6f9a6b6 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -21,7 +21,6 @@ import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 
-import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
@@ -83,7 +82,6 @@ import java.util.Map;
  * @param <InputT> the type of the (main) input elements
  * @param <OutputT> the type of the (main) output elements
  */
-@Experimental
 public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
 
   /** Information accessible to all methods in this {@code DoFn}. */


[40/51] [abbrv] incubator-beam git commit: Propagate getAllowedTimestampSkew from DoFn to its adapter

Posted by ke...@apache.org.
Propagate getAllowedTimestampSkew from DoFn to its adapter


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/a1c06d71
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/a1c06d71
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/a1c06d71

Branch: refs/heads/python-sdk
Commit: a1c06d71876384722982ec24da1607e41af653d9
Parents: 0b18652
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 14:56:19 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/transforms/DoFnReflector.java     | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a1c06d71/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
index 9bdfde8..c6168b3 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
@@ -72,6 +72,7 @@ import net.bytebuddy.jar.asm.Label;
 import net.bytebuddy.jar.asm.MethodVisitor;
 import net.bytebuddy.jar.asm.Opcodes;
 import net.bytebuddy.matcher.ElementMatchers;
+import org.joda.time.Duration;
 import org.joda.time.Instant;
 
 import java.io.IOException;
@@ -731,6 +732,11 @@ public abstract class DoFnReflector {
     }
 
     @Override
+    public Duration getAllowedTimestampSkew() {
+      return fn.getAllowedTimestampSkew();
+    }
+
+    @Override
     public void populateDisplayData(DisplayData.Builder builder) {
       builder.include(fn);
     }


[44/51] [abbrv] incubator-beam git commit: Use input type in coder inference for MapElements and FlatMapElements

Posted by ke...@apache.org.
Use input type in coder inference for MapElements and FlatMapElements

Previously, the input TypeDescriptor was unknown, so we would fail
to infer a coder for things like MapElements.of(SimpleFunction<T, T>)
even if the input PCollection provided a coder for T.

Now, the input type is plumbed appropriately and the coder is inferred.


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/4ac5cafe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/4ac5cafe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/4ac5cafe

Branch: refs/heads/python-sdk
Commit: 4ac5cafe90a371cf616f97cb202d5016b68616d1
Parents: 8daf518
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 29 10:35:01 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 20:18:59 2016 -0700

----------------------------------------------------------------------
 .../beam/sdk/transforms/FlatMapElements.java    | 126 +++++++++++++------
 .../apache/beam/sdk/transforms/MapElements.java |  60 +++++----
 .../beam/sdk/transforms/SimpleFunction.java     |  34 +++++
 .../sdk/transforms/FlatMapElementsTest.java     |  48 +++++++
 .../beam/sdk/transforms/MapElementsTest.java    |  84 +++++++++++++
 5 files changed, 288 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ac5cafe/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
index 694592e..04d993c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
@@ -17,8 +17,10 @@
  */
 package org.apache.beam.sdk.transforms;
 
+import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TypeDescriptor;
+import org.apache.beam.sdk.values.TypeDescriptors;
 
 import java.lang.reflect.ParameterizedType;
 
@@ -45,8 +47,16 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
    * descriptor need not be provided.
    */
   public static <InputT, OutputT> MissingOutputTypeDescriptor<InputT, OutputT>
-  via(SerializableFunction<InputT, ? extends Iterable<OutputT>> fn) {
-    return new MissingOutputTypeDescriptor<>(fn);
+  via(SerializableFunction<? super InputT, ? extends Iterable<OutputT>> fn) {
+
+    // TypeDescriptor interacts poorly with the wildcards needed to correctly express
+    // covariance and contravariance in Java, so instead we cast it to an invariant
+    // function here.
+    @SuppressWarnings("unchecked") // safe covariant cast
+    SerializableFunction<InputT, Iterable<OutputT>> simplerFn =
+        (SerializableFunction<InputT, Iterable<OutputT>>) fn;
+
+    return new MissingOutputTypeDescriptor<>(simplerFn);
   }
 
   /**
@@ -72,16 +82,15 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
    * <p>To use a Java 8 lambda, see {@link #via(SerializableFunction)}.
    */
   public static <InputT, OutputT> FlatMapElements<InputT, OutputT>
-  via(SimpleFunction<InputT, ? extends Iterable<OutputT>> fn) {
-
-    @SuppressWarnings({"rawtypes", "unchecked"}) // safe by static typing
-    TypeDescriptor<Iterable<?>> iterableType = (TypeDescriptor) fn.getOutputTypeDescriptor();
-
-    @SuppressWarnings("unchecked") // safe by correctness of getIterableElementType
-    TypeDescriptor<OutputT> outputType =
-        (TypeDescriptor<OutputT>) getIterableElementType(iterableType);
-
-    return new FlatMapElements<>(fn, outputType);
+  via(SimpleFunction<? super InputT, ? extends Iterable<OutputT>> fn) {
+    // TypeDescriptor interacts poorly with the wildcards needed to correctly express
+    // covariance and contravariance in Java, so instead we cast it to an invariant
+    // function here.
+    @SuppressWarnings("unchecked") // safe covariant cast
+    SimpleFunction<InputT, Iterable<OutputT>> simplerFn =
+        (SimpleFunction<InputT, Iterable<OutputT>>) fn;
+
+    return new FlatMapElements<>(simplerFn, fn.getClass());
   }
 
   /**
@@ -91,18 +100,80 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
    */
   public static final class MissingOutputTypeDescriptor<InputT, OutputT> {
 
-    private final SerializableFunction<InputT, ? extends Iterable<OutputT>> fn;
+    private final SerializableFunction<InputT, Iterable<OutputT>> fn;
 
     private MissingOutputTypeDescriptor(
-        SerializableFunction<InputT, ? extends Iterable<OutputT>> fn) {
+        SerializableFunction<InputT, Iterable<OutputT>> fn) {
       this.fn = fn;
     }
 
     public FlatMapElements<InputT, OutputT> withOutputType(TypeDescriptor<OutputT> outputType) {
-      return new FlatMapElements<>(fn, outputType);
+      TypeDescriptor<Iterable<OutputT>> iterableOutputType = TypeDescriptors.iterables(outputType);
+
+      return new FlatMapElements<>(
+          SimpleFunction.fromSerializableFunctionWithOutputType(fn,
+              iterableOutputType),
+              fn.getClass());
     }
   }
 
+  //////////////////////////////////////////////////////////////////////////////////////////////////
+
+  private final SimpleFunction<InputT, ? extends Iterable<OutputT>> fn;
+  private final DisplayData.Item<?> fnClassDisplayData;
+
+  private FlatMapElements(
+      SimpleFunction<InputT, ? extends Iterable<OutputT>> fn,
+      Class<?> fnClass) {
+    this.fn = fn;
+    this.fnClassDisplayData = DisplayData.item("flatMapFn", fnClass).withLabel("FlatMap Function");
+  }
+
+  @Override
+  public PCollection<OutputT> apply(PCollection<InputT> input) {
+    return input.apply(
+        "FlatMap",
+        ParDo.of(
+            new DoFn<InputT, OutputT>() {
+              private static final long serialVersionUID = 0L;
+
+              @ProcessElement
+              public void processElement(ProcessContext c) {
+                for (OutputT element : fn.apply(c.element())) {
+                  c.output(element);
+                }
+              }
+
+              @Override
+              public TypeDescriptor<InputT> getInputTypeDescriptor() {
+                return fn.getInputTypeDescriptor();
+              }
+
+              @Override
+              public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+                @SuppressWarnings({"rawtypes", "unchecked"}) // safe by static typing
+                TypeDescriptor<Iterable<?>> iterableType =
+                    (TypeDescriptor) fn.getOutputTypeDescriptor();
+
+                @SuppressWarnings("unchecked") // safe by correctness of getIterableElementType
+                TypeDescriptor<OutputT> outputType =
+                    (TypeDescriptor<OutputT>) getIterableElementType(iterableType);
+
+                return outputType;
+              }
+            }));
+  }
+
+  @Override
+  public void populateDisplayData(DisplayData.Builder builder) {
+    super.populateDisplayData(builder);
+    builder.add(fnClassDisplayData);
+  }
+
+  /**
+   * Does a best-effort job of getting the best {@link TypeDescriptor} for the type of the
+   * elements contained in the iterable described by the given {@link TypeDescriptor}.
+   */
   private static TypeDescriptor<?> getIterableElementType(
       TypeDescriptor<Iterable<?>> iterableTypeDescriptor) {
 
@@ -118,29 +189,4 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
         (ParameterizedType) iterableTypeDescriptor.getSupertype(Iterable.class).getType();
     return TypeDescriptor.of(iterableType.getActualTypeArguments()[0]);
   }
-
-  //////////////////////////////////////////////////////////////////////////////////////////////////
-
-  private final SerializableFunction<InputT, ? extends Iterable<OutputT>> fn;
-  private final transient TypeDescriptor<OutputT> outputType;
-
-  private FlatMapElements(
-      SerializableFunction<InputT, ? extends Iterable<OutputT>> fn,
-      TypeDescriptor<OutputT> outputType) {
-    this.fn = fn;
-    this.outputType = outputType;
-  }
-
-  @Override
-  public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
-      private static final long serialVersionUID = 0L;
-      @ProcessElement
-      public void processElement(ProcessContext c) {
-        for (OutputT element : fn.apply(c.element())) {
-          c.output(element);
-        }
-      }
-    })).setTypeDescriptorInternal(outputType);
-  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ac5cafe/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
index b7b9a5f..429d3fc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
@@ -67,9 +67,9 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
    *     }));
    * }</pre>
    */
-  public static <InputT, OutputT> MapElements<InputT, OutputT>
-  via(final SimpleFunction<InputT, OutputT> fn) {
-    return new MapElements<>(fn, fn.getOutputTypeDescriptor());
+  public static <InputT, OutputT> MapElements<InputT, OutputT> via(
+      final SimpleFunction<InputT, OutputT> fn) {
+    return new MapElements<>(fn, fn.getClass());
   }
 
   /**
@@ -85,42 +85,54 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
       this.fn = fn;
     }
 
-    public MapElements<InputT, OutputT> withOutputType(TypeDescriptor<OutputT> outputType) {
-      return new MapElements<>(fn, outputType);
+    public MapElements<InputT, OutputT> withOutputType(final TypeDescriptor<OutputT> outputType) {
+      return new MapElements<>(
+          SimpleFunction.fromSerializableFunctionWithOutputType(fn, outputType), fn.getClass());
     }
+
   }
 
   ///////////////////////////////////////////////////////////////////
 
-  private final SerializableFunction<InputT, OutputT> fn;
-  private final transient TypeDescriptor<OutputT> outputType;
+  private final SimpleFunction<InputT, OutputT> fn;
+  private final DisplayData.Item<?> fnClassDisplayData;
 
-  private MapElements(
-      SerializableFunction<InputT, OutputT> fn,
-      TypeDescriptor<OutputT> outputType) {
+  private MapElements(SimpleFunction<InputT, OutputT> fn, Class<?> fnClass) {
     this.fn = fn;
-    this.outputType = outputType;
+    this.fnClassDisplayData = DisplayData.item("mapFn", fnClass).withLabel("Map Function");
   }
 
   @Override
   public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
-      @ProcessElement
-      public void processElement(ProcessContext c) {
-        c.output(fn.apply(c.element()));
-      }
-
-      @Override
-      public void populateDisplayData(DisplayData.Builder builder) {
-        MapElements.this.populateDisplayData(builder);
-      }
-    })).setTypeDescriptorInternal(outputType);
+    return input.apply(
+        "Map",
+        ParDo.of(
+            new DoFn<InputT, OutputT>() {
+              @ProcessElement
+              public void processElement(ProcessContext c) {
+                c.output(fn.apply(c.element()));
+              }
+
+              @Override
+              public void populateDisplayData(DisplayData.Builder builder) {
+                MapElements.this.populateDisplayData(builder);
+              }
+
+              @Override
+              public TypeDescriptor<InputT> getInputTypeDescriptor() {
+                return fn.getInputTypeDescriptor();
+              }
+
+              @Override
+              public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+                return fn.getOutputTypeDescriptor();
+              }
+            }));
   }
 
   @Override
   public void populateDisplayData(DisplayData.Builder builder) {
     super.populateDisplayData(builder);
-    builder.add(DisplayData.item("mapFn", fn.getClass())
-      .withLabel("Map Function"));
+    builder.add(fnClassDisplayData);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ac5cafe/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
index 8894352..6c540cc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
@@ -27,6 +27,12 @@ import org.apache.beam.sdk.values.TypeDescriptor;
 public abstract class SimpleFunction<InputT, OutputT>
     implements SerializableFunction<InputT, OutputT> {
 
+  public static <InputT, OutputT>
+      SimpleFunction<InputT, OutputT> fromSerializableFunctionWithOutputType(
+          SerializableFunction<InputT, OutputT> fn, TypeDescriptor<OutputT> outputType) {
+    return new SimpleFunctionWithOutputType<>(fn, outputType);
+  }
+
   /**
    * Returns a {@link TypeDescriptor} capturing what is known statically
    * about the input type of this {@code OldDoFn} instance's most-derived
@@ -52,4 +58,32 @@ public abstract class SimpleFunction<InputT, OutputT>
   public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
     return new TypeDescriptor<OutputT>(this) {};
   }
+
+  /**
+   * A {@link SimpleFunction} built from a {@link SerializableFunction}, having
+   * a known output type that is explicitly set.
+   */
+  private static class SimpleFunctionWithOutputType<InputT, OutputT>
+      extends SimpleFunction<InputT, OutputT> {
+
+    private final SerializableFunction<InputT, OutputT> fn;
+    private final TypeDescriptor<OutputT> outputType;
+
+    public SimpleFunctionWithOutputType(
+        SerializableFunction<InputT, OutputT> fn,
+        TypeDescriptor<OutputT> outputType) {
+      this.fn = fn;
+      this.outputType = outputType;
+    }
+
+    @Override
+    public OutputT apply(InputT input) {
+      return fn.apply(input);
+    }
+
+    @Override
+    public TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+      return outputType;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ac5cafe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java
index 057fd19..781e143 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java
@@ -17,6 +17,8 @@
  */
 package org.apache.beam.sdk.transforms;
 
+import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.Matchers.equalTo;
 import static org.junit.Assert.assertThat;
 
@@ -24,6 +26,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TypeDescriptor;
@@ -102,6 +105,51 @@ public class FlatMapElementsTest implements Serializable {
     pipeline.run();
   }
 
+  /**
+   * A {@link SimpleFunction} to test that the coder registry can propagate coders
+   * that are bound to type variables.
+   */
+  private static class PolymorphicSimpleFunction<T> extends SimpleFunction<T, Iterable<T>> {
+    @Override
+    public Iterable<T> apply(T input) {
+      return Collections.<T>emptyList();
+    }
+  }
+
+  /**
+   * Basic test of {@link MapElements} coder propagation with a parametric {@link SimpleFunction}.
+   */
+  @Test
+  public void testPolymorphicSimpleFunction() throws Exception {
+    Pipeline pipeline = TestPipeline.create();
+    PCollection<Integer> output = pipeline
+        .apply(Create.of(1, 2, 3))
+
+        // This is the function that needs to propagate the input T to output T
+        .apply("Polymorphic Identity", MapElements.via(new PolymorphicSimpleFunction<Integer>()))
+
+        // This is a consumer to ensure that all coder inference logic is executed.
+        .apply("Test Consumer", MapElements.via(new SimpleFunction<Iterable<Integer>, Integer>() {
+          @Override
+          public Integer apply(Iterable<Integer> input) {
+            return 42;
+          }
+        }));
+  }
+
+  @Test
+  public void testSimpleFunctionClassDisplayData() {
+    SimpleFunction<Integer, List<Integer>> simpleFn = new SimpleFunction<Integer, List<Integer>>() {
+      @Override
+      public List<Integer> apply(Integer input) {
+        return Collections.emptyList();
+      }
+    };
+
+    FlatMapElements<?, ?> simpleMap = FlatMapElements.via(simpleFn);
+    assertThat(DisplayData.from(simpleMap), hasDisplayItem("flatMapFn", simpleFn.getClass()));
+  }
+
   @Test
   @Category(NeedsRunner.class)
   public void testVoidValues() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ac5cafe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
index b4751d2..dbf8844 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
@@ -54,6 +54,29 @@ public class MapElementsTest implements Serializable {
   public transient ExpectedException thrown = ExpectedException.none();
 
   /**
+   * A {@link SimpleFunction} to test that the coder registry can propagate coders
+   * that are bound to type variables.
+   */
+  private static class PolymorphicSimpleFunction<T> extends SimpleFunction<T, T> {
+    @Override
+    public T apply(T input) {
+      return input;
+    }
+  }
+
+  /**
+   * A {@link SimpleFunction} to test that the coder registry can propagate coders
+   * that are bound to type variables, when the variable appears nested in the
+   * output.
+   */
+  private static class NestedPolymorphicSimpleFunction<T> extends SimpleFunction<T, KV<T, String>> {
+    @Override
+    public KV<T, String> apply(T input) {
+      return KV.of(input, "hello");
+    }
+  }
+
+  /**
    * Basic test of {@link MapElements} with a {@link SimpleFunction}.
    */
   @Test
@@ -74,6 +97,55 @@ public class MapElementsTest implements Serializable {
   }
 
   /**
+   * Basic test of {@link MapElements} coder propagation with a parametric {@link SimpleFunction}.
+   */
+  @Test
+  public void testPolymorphicSimpleFunction() throws Exception {
+    Pipeline pipeline = TestPipeline.create();
+    PCollection<Integer> output = pipeline
+        .apply(Create.of(1, 2, 3))
+
+        // This is the function that needs to propagate the input T to output T
+        .apply("Polymorphic Identity", MapElements.via(new PolymorphicSimpleFunction<Integer>()))
+
+        // This is a consumer to ensure that all coder inference logic is executed.
+        .apply("Test Consumer", MapElements.via(new SimpleFunction<Integer, Integer>() {
+          @Override
+          public Integer apply(Integer input) {
+            return input;
+          }
+        }));
+  }
+
+  /**
+   * Test of {@link MapElements} coder propagation with a parametric {@link SimpleFunction}
+   * where the type variable occurs nested within other concrete type constructors.
+   */
+  @Test
+  public void testNestedPolymorphicSimpleFunction() throws Exception {
+    Pipeline pipeline = TestPipeline.create();
+    PCollection<Integer> output =
+        pipeline
+            .apply(Create.of(1, 2, 3))
+
+            // This is the function that needs to propagate the input T to output T
+            .apply(
+                "Polymorphic Identity",
+                MapElements.via(new NestedPolymorphicSimpleFunction<Integer>()))
+
+            // This is a consumer to ensure that all coder inference logic is executed.
+            .apply(
+                "Test Consumer",
+                MapElements.via(
+                    new SimpleFunction<KV<Integer, String>, Integer>() {
+                      @Override
+                      public Integer apply(KV<Integer, String> input) {
+                        return 42;
+                      }
+                    }));
+  }
+
+  /**
    * Basic test of {@link MapElements} with a {@link SerializableFunction}. This style is
    * generally discouraged in Java 7, in favor of {@link SimpleFunction}.
    */
@@ -148,6 +220,18 @@ public class MapElementsTest implements Serializable {
   }
 
   @Test
+  public void testSimpleFunctionClassDisplayData() {
+    SimpleFunction<?, ?> simpleFn = new SimpleFunction<Integer, Integer>() {
+      @Override
+      public Integer apply(Integer input) {
+        return input;
+      }
+    };
+
+    MapElements<?, ?> simpleMap = MapElements.via(simpleFn);
+    assertThat(DisplayData.from(simpleMap), hasDisplayItem("mapFn", simpleFn.getClass()));
+  }
+  @Test
   public void testSimpleFunctionDisplayData() {
     SimpleFunction<?, ?> simpleFn = new SimpleFunction<Integer, Integer>() {
       @Override


[07/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
index 2696020..ed9ec10 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
@@ -25,8 +25,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
@@ -58,15 +58,15 @@ import java.util.Set;
  */
 public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
 
-  /** The DoFn being run. */
-  public final DoFn<InputT, OutputT> fn;
+  /** The OldDoFn being run. */
+  public final OldDoFn<InputT, OutputT> fn;
 
-  /** The context used for running the DoFn. */
+  /** The context used for running the OldDoFn. */
   public final DoFnContext<InputT, OutputT> context;
 
   protected DoFnRunnerBase(
       PipelineOptions options,
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
@@ -145,7 +145,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
   }
 
   /**
-   * Invokes {@link DoFn#processElement} after certain pre-processings has been done in
+   * Invokes {@link OldDoFn#processElement} after certain pre-processings has been done in
    * {@link DoFnRunnerBase#processElement}.
    */
   protected abstract void invokeProcessElement(WindowedValue<InputT> elem);
@@ -162,17 +162,17 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
   }
 
   /**
-   * A concrete implementation of {@code DoFn.Context} used for running a {@link DoFn}.
+   * A concrete implementation of {@code OldDoFn.Context} used for running a {@link OldDoFn}.
    *
-   * @param <InputT> the type of the DoFn's (main) input elements
-   * @param <OutputT> the type of the DoFn's (main) output elements
+   * @param <InputT> the type of the OldDoFn's (main) input elements
+   * @param <OutputT> the type of the OldDoFn's (main) output elements
    */
   private static class DoFnContext<InputT, OutputT>
-      extends DoFn<InputT, OutputT>.Context {
+      extends OldDoFn<InputT, OutputT>.Context {
     private static final int MAX_SIDE_OUTPUTS = 1000;
 
     final PipelineOptions options;
-    final DoFn<InputT, OutputT> fn;
+    final OldDoFn<InputT, OutputT> fn;
     final SideInputReader sideInputReader;
     final OutputManager outputManager;
     final TupleTag<OutputT> mainOutputTag;
@@ -187,7 +187,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
     private Set<TupleTag<?>> outputTags;
 
     public DoFnContext(PipelineOptions options,
-                       DoFn<InputT, OutputT> fn,
+                       OldDoFn<InputT, OutputT> fn,
                        SideInputReader sideInputReader,
                        OutputManager outputManager,
                        TupleTag<OutputT> mainOutputTag,
@@ -317,8 +317,8 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
     }
 
     // Following implementations of output, outputWithTimestamp, and sideOutput
-    // are only accessible in DoFn.startBundle and DoFn.finishBundle, and will be shadowed by
-    // ProcessContext's versions in DoFn.processElement.
+    // are only accessible in OldDoFn.startBundle and OldDoFn.finishBundle, and will be shadowed by
+    // ProcessContext's versions in OldDoFn.processElement.
     @Override
     public void output(OutputT output) {
       outputWindowedValue(output, null, null, PaneInfo.NO_FIRING);
@@ -350,9 +350,10 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
   }
 
   /**
-   * Returns a new {@code DoFn.ProcessContext} for the given element.
+   * Returns a new {@code OldDoFn.ProcessContext} for the given element.
    */
-  protected DoFn<InputT, OutputT>.ProcessContext createProcessContext(WindowedValue<InputT> elem) {
+  protected OldDoFn<InputT, OutputT>.ProcessContext createProcessContext(
+      WindowedValue<InputT> elem) {
     return new DoFnProcessContext<InputT, OutputT>(fn, context, elem);
   }
 
@@ -365,21 +366,21 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
   }
 
   /**
-   * A concrete implementation of {@code DoFn.ProcessContext} used for
-   * running a {@link DoFn} over a single element.
+   * A concrete implementation of {@code OldDoFn.ProcessContext} used for
+   * running a {@link OldDoFn} over a single element.
    *
-   * @param <InputT> the type of the DoFn's (main) input elements
-   * @param <OutputT> the type of the DoFn's (main) output elements
+   * @param <InputT> the type of the OldDoFn's (main) input elements
+   * @param <OutputT> the type of the OldDoFn's (main) output elements
    */
   static class DoFnProcessContext<InputT, OutputT>
-      extends DoFn<InputT, OutputT>.ProcessContext {
+      extends OldDoFn<InputT, OutputT>.ProcessContext {
 
 
-    final DoFn<InputT, OutputT> fn;
+    final OldDoFn<InputT, OutputT> fn;
     final DoFnContext<InputT, OutputT> context;
     final WindowedValue<InputT> windowedValue;
 
-    public DoFnProcessContext(DoFn<InputT, OutputT> fn,
+    public DoFnProcessContext(OldDoFn<InputT, OutputT> fn,
                               DoFnContext<InputT, OutputT> context,
                               WindowedValue<InputT> windowedValue) {
       fn.super();
@@ -426,7 +427,8 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
     public BoundedWindow window() {
       if (!(fn instanceof RequiresWindowAccess)) {
         throw new UnsupportedOperationException(
-            "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+            "window() is only available in the context of an OldDoFn marked as"
+                + " RequiresWindowAccess.");
       }
       return Iterables.getOnlyElement(windows());
     }
@@ -484,7 +486,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
         throw new IllegalArgumentException(String.format(
             "Cannot output with timestamp %s. Output timestamps must be no earlier than the "
             + "timestamp of the current input (%s) minus the allowed skew (%s). See the "
-            + "DoFn#getAllowedTimestampSkew() Javadoc for details on changing the allowed skew.",
+            + "OldDoFn#getAllowedTimestampSkew() Javadoc for details on changing the allowed skew.",
             timestamp, windowedValue.getTimestamp(),
             PeriodFormat.getDefault().print(fn.getAllowedTimestampSkew().toPeriod())));
       }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
index cb96da2..a9f3cf4 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.DoFnRunner.ReduceFnExecutor;
 import org.apache.beam.sdk.util.ExecutionContext.StepContext;
@@ -27,6 +27,7 @@ import org.apache.beam.sdk.util.common.CounterSet;
 import org.apache.beam.sdk.util.common.CounterSet.AddCounterMutator;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
+
 import java.util.List;
 
 /**
@@ -44,13 +45,13 @@ public class DoFnRunners {
   }
 
   /**
-   * Returns a basic implementation of {@link DoFnRunner} that works for most {@link DoFn DoFns}.
+   * Returns a basic implementation of {@link DoFnRunner} that works for most
+   * {@link OldDoFn OldDoFns}.
    *
-   * <p>It invokes {@link DoFn#processElement} for each input.
+   * <p>It invokes {@link OldDoFn#processElement} for each input.
    */
   public static <InputT, OutputT> DoFnRunner<InputT, OutputT> simpleRunner(
       PipelineOptions options,
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
@@ -71,13 +72,14 @@ public class DoFnRunners {
   }
 
   /**
-   * Returns a basic implementation of {@link DoFnRunner} that works for most {@link DoFn DoFns}.
+   * Returns a basic implementation of {@link DoFnRunner} that works for most
+   * {@link OldDoFn OldDoFns}.
    *
-   * <p>It invokes {@link DoFn#processElement} for each input.
+   * <p>It invokes {@link OldDoFn#processElement} for each input.
    */
   public static <InputT, OutputT> DoFnRunner<InputT, OutputT> simpleRunner(
       PipelineOptions options,
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
@@ -99,7 +101,7 @@ public class DoFnRunners {
   /**
    * Returns an implementation of {@link DoFnRunner} that handles late data dropping.
    *
-   * <p>It drops elements from expired windows before they reach the underlying {@link DoFn}.
+   * <p>It drops elements from expired windows before they reach the underlying {@link OldDoFn}.
    */
   public static <K, InputT, OutputT, W extends BoundedWindow>
       DoFnRunner<KeyedWorkItem<K, InputT>, KV<K, OutputT>> lateDataDroppingRunner(
@@ -133,7 +135,7 @@ public class DoFnRunners {
   /**
    * Returns an implementation of {@link DoFnRunner} that handles late data dropping.
    *
-   * <p>It drops elements from expired windows before they reach the underlying {@link DoFn}.
+   * <p>It drops elements from expired windows before they reach the underlying {@link OldDoFn}.
    */
   public static <K, InputT, OutputT, W extends BoundedWindow>
   DoFnRunner<KeyedWorkItem<K, InputT>, KV<K, OutputT>> lateDataDroppingRunner(
@@ -160,7 +162,7 @@ public class DoFnRunners {
 
   public static <InputT, OutputT> DoFnRunner<InputT, OutputT> createDefault(
       PipelineOptions options,
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
@@ -198,7 +200,7 @@ public class DoFnRunners {
 
   public static <InputT, OutputT> DoFnRunner<InputT, OutputT> createDefault(
       PipelineOptions options,
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
index b575559..f82e5df 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
@@ -19,14 +19,14 @@ package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.state.StateInternalsFactory;
 import org.apache.beam.sdk.values.KV;
 
 /**
- * DoFn that merges windows and groups elements in those windows, optionally
+ * OldDoFn that merges windows and groups elements in those windows, optionally
  * combining values.
  *
  * @param <K> key type
@@ -36,7 +36,7 @@ import org.apache.beam.sdk.values.KV;
  */
 @SystemDoFnInternal
 public abstract class GroupAlsoByWindowsDoFn<K, InputT, OutputT, W extends BoundedWindow>
-    extends DoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
+    extends OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
   public static final String DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER = "DroppedDueToClosedWindow";
   public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "DroppedDueToLateness";
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
index d185a24..f872ffc 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
@@ -17,7 +17,7 @@
  */
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.state.StateInternals;
 import org.apache.beam.sdk.util.state.StateInternalsFactory;
@@ -52,7 +52,7 @@ public class GroupAlsoByWindowsViaOutputBufferDoFn<K, InputT, OutputT, W extends
 
   @Override
   public void processElement(
-      DoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>>.ProcessContext c)
+      OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>>.ProcessContext c)
           throws Exception {
     K key = c.element().getKey();
     // Used with Batch, we know that all the data is available for this key. We can't use the

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
index 8a0152e..f0f9007 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
@@ -22,8 +22,8 @@ import static com.google.common.base.Preconditions.checkArgument;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.IterableCoder;
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -138,7 +138,9 @@ public class GroupByKeyViaGroupByKeyOnly<K, V>
       return input
           .apply(
               ParDo.of(
-                  new DoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<WindowedValue<V>>>>() {
+                  new OldDoFn<
+                      KV<K, Iterable<WindowedValue<V>>>,
+                      KV<K, Iterable<WindowedValue<V>>>>() {
                     @Override
                     public void processElement(ProcessContext c) {
                       KV<K, Iterable<WindowedValue<V>>> kvs = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
index 4815162..8b3ba24 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
@@ -18,7 +18,7 @@
 package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.values.KV;
 
@@ -31,7 +31,7 @@ import org.joda.time.Instant;
 
 /**
  * A customized {@link DoFnRunner} that handles late data dropping for
- * a {@link KeyedWorkItem} input {@link DoFn}.
+ * a {@link KeyedWorkItem} input {@link OldDoFn}.
  *
  * <p>It expands windows before checking data lateness.
  *

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
index 812e99a..0c5849e 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
@@ -32,7 +32,6 @@ import org.apache.beam.sdk.util.state.ValueState;
 import com.google.common.annotations.VisibleForTesting;
 
 import org.joda.time.Instant;
-
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
index c879409..1fa0830 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
@@ -22,7 +22,7 @@ import static com.google.common.base.Preconditions.checkState;
 
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
@@ -181,7 +181,7 @@ public class ReduceFnRunner<K, InputT, OutputT, W extends BoundedWindow> {
    * Store the previously emitted pane (if any) for each window.
    *
    * <ul>
-   * <li>State: The previous {@link PaneInfo} passed to the user's {@link DoFn#processElement},
+   * <li>State: The previous {@link PaneInfo} passed to the user's {@link OldDoFn#processElement},
    * if any.
    * <li>Style style: DIRECT
    * <li>Merging: Always keyed by actual window, so does not depend on {@link #activeWindows}.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
index e034638..a0cdb40 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
@@ -19,21 +19,21 @@ package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.DoFnRunners.OutputManager;
 import org.apache.beam.sdk.util.ExecutionContext.StepContext;
 import org.apache.beam.sdk.values.TupleTag;
 import java.util.List;
 
 /**
- * Runs a {@link DoFn} by constructing the appropriate contexts and passing them in.
+ * Runs a {@link OldDoFn} by constructing the appropriate contexts and passing them in.
  *
- * @param <InputT> the type of the DoFn's (main) input elements
- * @param <OutputT> the type of the DoFn's (main) output elements
+ * @param <InputT> the type of the OldDoFn's (main) input elements
+ * @param <OutputT> the type of the OldDoFn's (main) output elements
  */
 public class SimpleDoFnRunner<InputT, OutputT> extends DoFnRunnerBase<InputT, OutputT>{
 
-  protected SimpleDoFnRunner(PipelineOptions options, DoFn<InputT, OutputT> fn,
+  protected SimpleDoFnRunner(PipelineOptions options, OldDoFn<InputT, OutputT> fn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag, List<TupleTag<?>> sideOutputTags, StepContext stepContext,
@@ -44,7 +44,7 @@ public class SimpleDoFnRunner<InputT, OutputT> extends DoFnRunnerBase<InputT, Ou
 
   @Override
   protected void invokeProcessElement(WindowedValue<InputT> elem) {
-    final DoFn<InputT, OutputT>.ProcessContext processContext = createProcessContext(elem);
+    final OldDoFn<InputT, OutputT>.ProcessContext processContext = createProcessContext(elem);
     // This can contain user code. Wrap it in case it throws an exception.
     try {
       fn.processElement(processContext);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
index 985f210..5c17009 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
@@ -37,7 +37,6 @@ import org.joda.time.Duration;
 import org.joda.time.Instant;
 
 import java.io.Serializable;
-
 import javax.annotation.Nullable;
 
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
index dc2413a..8d604cb 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
@@ -21,6 +21,7 @@ import static org.apache.beam.sdk.WindowMatchers.isSingleWindowedValue;
 import static org.apache.beam.sdk.WindowMatchers.isWindowedValue;
 
 import static com.google.common.base.Preconditions.checkArgument;
+
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.emptyIterable;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
index e0ff879..feba191 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.util;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
index fb74fc6..f0c52b9 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.util;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Mockito.mock;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.BaseExecutionContext.StepContext;
 import org.apache.beam.sdk.values.TupleTag;
 
@@ -62,7 +62,7 @@ public class SimpleDoFnRunnerTest {
     runner.processElement(WindowedValue.valueInGlobalWindow("anyValue"));
   }
 
-  private DoFnRunner<String, String> createRunner(DoFn<String, String> fn) {
+  private DoFnRunner<String, String> createRunner(OldDoFn<String, String> fn) {
     // Pass in only necessary parameters for the test
     List<TupleTag<?>> sideOutputTags = Arrays.asList();
     StepContext context = mock(StepContext.class);
@@ -70,7 +70,7 @@ public class SimpleDoFnRunnerTest {
           null, fn, null, null, null, sideOutputTags, context, null, null);
   }
 
-  static class ThrowingDoFn extends DoFn<String, String> {
+  static class ThrowingDoFn extends OldDoFn<String, String> {
     final Exception exceptionToThrow =
         new UnsupportedOperationException("Expected exception");
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
index 477da30..e052226 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
@@ -23,7 +23,7 @@ import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupAlsoByWindow;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.GroupByKeyViaGroupByKeyOnly;
@@ -106,7 +106,7 @@ class GroupAlsoByWindowEvaluatorFactory implements TransformEvaluatorFactory {
 
       StateInternals<K> stateInternals = (StateInternals<K>) stepContext.stateInternals();
 
-      DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> gabwDoFn =
+      OldDoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> gabwDoFn =
           GroupAlsoByWindowViaWindowSetDoFn.create(
               windowingStrategy,
               new ConstantStateInternalsFactory<K>(stateInternals),

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
index dcbe3d1..8be12fd 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
@@ -23,7 +23,7 @@ import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.IllegalMutationException;
 import org.apache.beam.sdk.util.MutationDetector;
 import org.apache.beam.sdk.util.MutationDetectors;
@@ -42,7 +42,7 @@ import org.joda.time.Instant;
  * elements added to the bundle will be encoded by the {@link Coder} of the underlying
  * {@link PCollection}.
  *
- * <p>This catches errors during the execution of a {@link DoFn} caused by modifying an element
+ * <p>This catches errors during the execution of a {@link OldDoFn} caused by modifying an element
  * after it is added to an output {@link PCollection}.
  */
 class ImmutabilityCheckingBundleFactory implements BundleFactory {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
index dd1cf37..6ef0ffe 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.DoFnRunner;
 import org.apache.beam.sdk.util.DoFnRunners;
 import org.apache.beam.sdk.util.DoFnRunners.OutputManager;
@@ -48,7 +48,7 @@ class ParDoEvaluator<T> implements TransformEvaluator<T> {
       DirectStepContext stepContext,
       CommittedBundle<InputT> inputBundle,
       AppliedPTransform<PCollection<InputT>, ?, ?> application,
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       List<PCollectionView<?>> sideInputs,
       TupleTag<OutputT> mainOutputTag,
       List<TupleTag<?>> sideOutputTags,

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
index eda3db4..ce770ca 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.direct;
 import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo.BoundMulti;
 import org.apache.beam.sdk.values.PCollection;
@@ -38,7 +38,7 @@ import java.util.Map;
  * {@link BoundMulti} primitive {@link PTransform}.
  */
 class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
-  private final LoadingCache<AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<DoFn<?, ?>>>
+  private final LoadingCache<AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>
       fnClones;
 
   public ParDoMultiEvaluatorFactory() {
@@ -46,9 +46,10 @@ class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
         CacheBuilder.newBuilder()
             .build(
                 new CacheLoader<
-                    AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<DoFn<?, ?>>>() {
+                    AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>() {
                   @Override
-                  public ThreadLocal<DoFn<?, ?>> load(AppliedPTransform<?, ?, BoundMulti<?, ?>> key)
+                  public ThreadLocal<OldDoFn<?, ?>> load(
+                      AppliedPTransform<?, ?, BoundMulti<?, ?>> key)
                       throws Exception {
                     @SuppressWarnings({"unchecked", "rawtypes"})
                     ThreadLocal threadLocal =
@@ -76,7 +77,7 @@ class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
     Map<TupleTag<?>, PCollection<?>> outputs = application.getOutput().getAll();
 
     @SuppressWarnings({"unchecked", "rawtypes"})
-    ThreadLocal<DoFn<InT, OuT>> fnLocal =
+    ThreadLocal<OldDoFn<InT, OuT>> fnLocal =
         (ThreadLocal) fnClones.getUnchecked((AppliedPTransform) application);
     String stepName = evaluationContext.getStepName(application);
     DirectStepContext stepContext =

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
index 044abdc..53af6af 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.direct;
 import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo.Bound;
 import org.apache.beam.sdk.values.PCollection;
@@ -38,16 +38,17 @@ import java.util.Collections;
  * {@link Bound ParDo.Bound} primitive {@link PTransform}.
  */
 class ParDoSingleEvaluatorFactory implements TransformEvaluatorFactory {
-  private final LoadingCache<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<DoFn<?, ?>>>
+  private final LoadingCache<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>
       fnClones;
 
   public ParDoSingleEvaluatorFactory() {
     fnClones =
         CacheBuilder.newBuilder()
             .build(
-                new CacheLoader<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<DoFn<?, ?>>>() {
+                new CacheLoader<
+                    AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>() {
                   @Override
-                  public ThreadLocal<DoFn<?, ?>> load(AppliedPTransform<?, ?, Bound<?, ?>> key)
+                  public ThreadLocal<OldDoFn<?, ?>> load(AppliedPTransform<?, ?, Bound<?, ?>> key)
                       throws Exception {
                     @SuppressWarnings({"unchecked", "rawtypes"})
                     ThreadLocal threadLocal =
@@ -80,7 +81,7 @@ class ParDoSingleEvaluatorFactory implements TransformEvaluatorFactory {
             .getOrCreateStepContext(stepName, stepName);
 
     @SuppressWarnings({"unchecked", "rawtypes"})
-    ThreadLocal<DoFn<InputT, OutputT>> fnLocal =
+    ThreadLocal<OldDoFn<InputT, OutputT>> fnLocal =
         (ThreadLocal) fnClones.getUnchecked((AppliedPTransform) application);
     try {
       ParDoEvaluator<InputT> parDoEvaluator =

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
index 7fac1e3..d021b43 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 
 import javax.annotation.Nullable;
@@ -38,8 +38,8 @@ public interface TransformEvaluatorFactory {
    * Create a new {@link TransformEvaluator} for the application of the {@link PTransform}.
    *
    * <p>Any work that must be done before input elements are processed (such as calling
-   * {@link DoFn#startBundle(DoFn.Context)}) must be done before the {@link TransformEvaluator} is
-   * made available to the caller.
+   * {@link OldDoFn#startBundle(OldDoFn.Context)}) must be done before the
+   * {@link TransformEvaluator} is made available to the caller.
    *
    * <p>May return null if the application cannot produce an evaluator (for example, it is a
    * {@link Read} {@link PTransform} where all evaluators are in-use).

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
index d6ee6ea..cee4001 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
@@ -23,9 +23,9 @@ import static com.google.common.base.Preconditions.checkArgument;
 import org.apache.beam.sdk.io.Write;
 import org.apache.beam.sdk.io.Write.Bound;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Values;
@@ -101,7 +101,7 @@ class WriteWithShardingFactory implements PTransformOverrideFactory {
   }
 
   @VisibleForTesting
-  static class KeyBasedOnCountFn<T> extends DoFn<T, KV<Integer, T>> {
+  static class KeyBasedOnCountFn<T> extends OldDoFn<T, KV<Integer, T>> {
     @VisibleForTesting
     static final int MIN_SHARDS_FOR_LOG = 3;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
index 353eef6..529316c 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.io.CountingInput;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
@@ -62,9 +62,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
         p.apply("listCreate", Create.of("foo", "bar"))
             .apply(
                 ParDo.of(
-                    new DoFn<String, String>() {
+                    new OldDoFn<String, String>() {
                       @Override
-                      public void processElement(DoFn<String, String>.ProcessContext c)
+                      public void processElement(OldDoFn<String, String>.ProcessContext c)
                           throws Exception {
                         c.output(Integer.toString(c.element().length()));
                       }
@@ -109,9 +109,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
     PCollection<String> transformed =
         created.apply(
             ParDo.of(
-                new DoFn<String, String>() {
+                new OldDoFn<String, String>() {
                   @Override
-                  public void processElement(DoFn<String, String>.ProcessContext c)
+                  public void processElement(OldDoFn<String, String>.ProcessContext c)
                       throws Exception {
                     c.output(Integer.toString(c.element().length()));
                   }
@@ -140,9 +140,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
     PCollection<String> transformed =
         created.apply(
             ParDo.of(
-                new DoFn<String, String>() {
+                new OldDoFn<String, String>() {
                   @Override
-                  public void processElement(DoFn<String, String>.ProcessContext c)
+                  public void processElement(OldDoFn<String, String>.ProcessContext c)
                       throws Exception {
                     c.output(Integer.toString(c.element().length()));
                   }
@@ -157,9 +157,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
     p.apply(Create.of("1", "2", "3"))
         .apply(
             ParDo.of(
-                new DoFn<String, String>() {
+                new OldDoFn<String, String>() {
                   @Override
-                  public void processElement(DoFn<String, String>.ProcessContext c)
+                  public void processElement(OldDoFn<String, String>.ProcessContext c)
                       throws Exception {
                     c.output(Integer.toString(c.element().length()));
                   }
@@ -182,9 +182,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
     PCollection<String> transformed =
         created.apply(
             ParDo.of(
-                new DoFn<String, String>() {
+                new OldDoFn<String, String>() {
                   @Override
-                  public void processElement(DoFn<String, String>.ProcessContext c)
+                  public void processElement(OldDoFn<String, String>.ProcessContext c)
                       throws Exception {
                     c.output(Integer.toString(c.element().length()));
                   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
index 09707bd..29dea32 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
@@ -32,9 +32,9 @@ import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -159,7 +159,7 @@ public class DirectRunnerTest implements Serializable {
 
   @Test
   public void transformDisplayDataExceptionShouldFail() {
-    DoFn<Integer, Integer> brokenDoFn = new DoFn<Integer, Integer>() {
+    OldDoFn<Integer, Integer> brokenDoFn = new OldDoFn<Integer, Integer>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {}
 
@@ -211,7 +211,7 @@ public class DirectRunnerTest implements Serializable {
 
 
   /**
-   * Tests that a {@link DoFn} that mutates an output with a good equals() fails in the
+   * Tests that a {@link OldDoFn} that mutates an output with a good equals() fails in the
    * {@link DirectRunner}.
    */
   @Test
@@ -220,7 +220,7 @@ public class DirectRunnerTest implements Serializable {
 
     pipeline
         .apply(Create.of(42))
-        .apply(ParDo.of(new DoFn<Integer, List<Integer>>() {
+        .apply(ParDo.of(new OldDoFn<Integer, List<Integer>>() {
           @Override public void processElement(ProcessContext c) {
             List<Integer> outputList = Arrays.asList(1, 2, 3, 4);
             c.output(outputList);
@@ -236,7 +236,7 @@ public class DirectRunnerTest implements Serializable {
   }
 
   /**
-   * Tests that a {@link DoFn} that mutates an output with a good equals() fails in the
+   * Tests that a {@link OldDoFn} that mutates an output with a good equals() fails in the
    * {@link DirectRunner}.
    */
   @Test
@@ -245,7 +245,7 @@ public class DirectRunnerTest implements Serializable {
 
     pipeline
         .apply(Create.of(42))
-        .apply(ParDo.of(new DoFn<Integer, List<Integer>>() {
+        .apply(ParDo.of(new OldDoFn<Integer, List<Integer>>() {
           @Override public void processElement(ProcessContext c) {
             List<Integer> outputList = Arrays.asList(1, 2, 3, 4);
             c.output(outputList);
@@ -260,7 +260,7 @@ public class DirectRunnerTest implements Serializable {
   }
 
   /**
-   * Tests that a {@link DoFn} that mutates an output with a bad equals() still fails
+   * Tests that a {@link OldDoFn} that mutates an output with a bad equals() still fails
    * in the {@link DirectRunner}.
    */
   @Test
@@ -269,7 +269,7 @@ public class DirectRunnerTest implements Serializable {
 
     pipeline
         .apply(Create.of(42))
-        .apply(ParDo.of(new DoFn<Integer, byte[]>() {
+        .apply(ParDo.of(new OldDoFn<Integer, byte[]>() {
           @Override public void processElement(ProcessContext c) {
             byte[] outputArray = new byte[]{0x1, 0x2, 0x3};
             c.output(outputArray);
@@ -285,7 +285,7 @@ public class DirectRunnerTest implements Serializable {
   }
 
   /**
-   * Tests that a {@link DoFn} that mutates its input with a good equals() fails in the
+   * Tests that a {@link OldDoFn} that mutates its input with a good equals() fails in the
    * {@link DirectRunner}.
    */
   @Test
@@ -295,7 +295,7 @@ public class DirectRunnerTest implements Serializable {
     pipeline
         .apply(Create.of(Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))
             .withCoder(ListCoder.of(VarIntCoder.of())))
-        .apply(ParDo.of(new DoFn<List<Integer>, Integer>() {
+        .apply(ParDo.of(new OldDoFn<List<Integer>, Integer>() {
           @Override public void processElement(ProcessContext c) {
             List<Integer> inputList = c.element();
             inputList.set(0, 37);
@@ -310,7 +310,7 @@ public class DirectRunnerTest implements Serializable {
   }
 
   /**
-   * Tests that a {@link DoFn} that mutates an input with a bad equals() still fails
+   * Tests that a {@link OldDoFn} that mutates an input with a bad equals() still fails
    * in the {@link DirectRunner}.
    */
   @Test
@@ -319,7 +319,7 @@ public class DirectRunnerTest implements Serializable {
 
     pipeline
         .apply(Create.of(new byte[]{0x1, 0x2, 0x3}, new byte[]{0x4, 0x5, 0x6}))
-        .apply(ParDo.of(new DoFn<byte[], Integer>() {
+        .apply(ParDo.of(new OldDoFn<byte[], Integer>() {
           @Override public void processElement(ProcessContext c) {
             byte[] inputArray = c.element();
             inputArray[0] = 0xa;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
index d40cf93..db934e5 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.coders.ByteArrayCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -213,9 +213,9 @@ public class ImmutabilityCheckingBundleFactoryTest {
     CommittedBundle<byte[]> committed = intermediate.commit(Instant.now());
   }
 
-  private static class IdentityDoFn<T> extends DoFn<T, T> {
+  private static class IdentityDoFn<T> extends OldDoFn<T, T> {
     @Override
-    public void processElement(DoFn<T, T>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<T, T>.ProcessContext c) throws Exception {
       c.output(c.element());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
index 890e06d..e1be120 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.IllegalMutationException;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -59,9 +59,9 @@ public class ImmutabilityEnforcementFactoryTest implements Serializable {
         p.apply(Create.of("foo".getBytes(), "spamhameggs".getBytes()))
             .apply(
                 ParDo.of(
-                    new DoFn<byte[], byte[]>() {
+                    new OldDoFn<byte[], byte[]>() {
                       @Override
-                      public void processElement(DoFn<byte[], byte[]>.ProcessContext c)
+                      public void processElement(OldDoFn<byte[], byte[]>.ProcessContext c)
                           throws Exception {
                         c.element()[0] = 'b';
                       }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
index aa0d976..9e273ad 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
@@ -28,9 +28,9 @@ import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -180,9 +180,9 @@ public class KeyedPValueTrackingVisitorTest {
     }
   }
 
-  private static class IdentityFn<K> extends DoFn<K, K> {
+  private static class IdentityFn<K> extends OldDoFn<K, K> {
     @Override
-    public void processElement(DoFn<K, K>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<K, K>.ProcessContext c) throws Exception {
       c.output(c.element());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
index 07f478d..3208841 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
@@ -30,7 +30,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -169,7 +169,7 @@ public class ParDoEvaluatorTest {
         ImmutableMap.<TupleTag<?>, PCollection<?>>of(mainOutputTag, output));
   }
 
-  private static class RecorderFn extends DoFn<Integer, Integer> {
+  private static class RecorderFn extends OldDoFn<Integer, Integer> {
     private Collection<Integer> processed;
     private final PCollectionView<Integer> view;
 
@@ -179,7 +179,7 @@ public class ParDoEvaluatorTest {
     }
 
     @Override
-    public void processElement(DoFn<Integer, Integer>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<Integer, Integer>.ProcessContext c) throws Exception {
       processed.add(c.element());
       c.output(c.element() + c.sideInput(view));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
index c0ab4df..19094cb 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.ParDo.BoundMulti;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -80,7 +80,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
 
     BoundMulti<String, KV<String, Integer>> pardo =
         ParDo.of(
-                new DoFn<String, KV<String, Integer>>() {
+                new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(KV.<String, Integer>of(c.element(), c.element().length()));
@@ -170,7 +170,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
 
     BoundMulti<String, KV<String, Integer>> pardo =
         ParDo.of(
-                new DoFn<String, KV<String, Integer>>() {
+                new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(KV.<String, Integer>of(c.element(), c.element().length()));
@@ -254,7 +254,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
         StateNamespaces.window(GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE);
     BoundMulti<String, KV<String, Integer>> pardo =
         ParDo.of(
-                new DoFn<String, KV<String, Integer>>() {
+                new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.windowingInternals()
@@ -354,7 +354,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
 
     BoundMulti<String, KV<String, Integer>> pardo =
         ParDo.of(
-                new DoFn<String, KV<String, Integer>>() {
+                new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.windowingInternals().stateInternals();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
index d778da6..a4fd570 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
@@ -73,7 +73,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
     PCollection<Integer> collection =
         input.apply(
             ParDo.of(
-                new DoFn<String, Integer>() {
+                new OldDoFn<String, Integer>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(c.element().length());
@@ -127,7 +127,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
     PCollection<Integer> collection =
         input.apply(
             ParDo.of(
-                new DoFn<String, Integer>() {
+                new OldDoFn<String, Integer>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.sideOutput(sideOutputTag, c.element().length());
@@ -179,7 +179,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
         StateNamespaces.window(GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE);
     ParDo.Bound<String, KV<String, Integer>> pardo =
         ParDo.of(
-            new DoFn<String, KV<String, Integer>>() {
+            new OldDoFn<String, KV<String, Integer>>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.windowingInternals()
@@ -265,7 +265,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
 
     ParDo.Bound<String, KV<String, Integer>> pardo =
         ParDo.of(
-            new DoFn<String, KV<String, Integer>>() {
+            new OldDoFn<String, KV<String, Integer>>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.windowingInternals().stateInternals();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
index 7c7005c..22f148a 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
@@ -38,9 +38,9 @@ import org.apache.beam.sdk.coders.VarLongCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Filter;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.WithKeys;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -76,7 +76,6 @@ import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Map;
-
 import javax.annotation.Nullable;
 
 /**
@@ -105,9 +104,9 @@ public class WatermarkManagerTest implements Serializable {
     createdInts = p.apply("createdInts", Create.of(1, 2, 3));
 
     filtered = createdInts.apply("filtered", Filter.greaterThan(1));
-    filteredTimesTwo = filtered.apply("timesTwo", ParDo.of(new DoFn<Integer, Integer>() {
+    filteredTimesTwo = filtered.apply("timesTwo", ParDo.of(new OldDoFn<Integer, Integer>() {
       @Override
-      public void processElement(DoFn<Integer, Integer>.ProcessContext c) throws Exception {
+      public void processElement(OldDoFn<Integer, Integer>.ProcessContext c) throws Exception {
         c.output(c.element() * 2);
       }
     }));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
index 56737a4..716c8ad 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
@@ -32,7 +32,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Keys;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -230,7 +230,7 @@ public class TFIDF {
       // Create a collection of pairs mapping a URI to each
       // of the words in the document associated with that that URI.
       PCollection<KV<URI, String>> uriToWords = uriToContent
-          .apply("SplitWords", ParDo.of(new DoFn<KV<URI, String>, KV<URI, String>>() {
+          .apply("SplitWords", ParDo.of(new OldDoFn<KV<URI, String>, KV<URI, String>>() {
             private static final long serialVersionUID = 0;
 
             @Override
@@ -275,7 +275,7 @@ public class TFIDF {
       // by the URI key.
       PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
           .apply("ShiftKeys", ParDo.of(
-              new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+              new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
                 private static final long serialVersionUID = 0;
 
                 @Override
@@ -316,7 +316,7 @@ public class TFIDF {
       // divided by the total number of words in the document.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
           .apply("ComputeTermFrequencies", ParDo.of(
-              new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 private static final long serialVersionUID = 0;
 
                 @Override
@@ -339,11 +339,11 @@ public class TFIDF {
       // documents in which the word appears divided by the total
       // number of documents in the corpus. Note how the total number of
       // documents is passed as a side input; the same value is
-      // presented to each invocation of the DoFn.
+      // presented to each invocation of the OldDoFn.
       PCollection<KV<String, Double>> wordToDf = wordToDocCount
           .apply("ComputeDocFrequencies", ParDo
               .withSideInputs(totalDocuments)
-              .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+              .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
                 private static final long serialVersionUID = 0;
 
                 @Override
@@ -375,7 +375,7 @@ public class TFIDF {
 
       return wordToUriAndTfAndDf
           .apply("ComputeTfIdf", ParDo.of(
-              new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 private static final long serialVersionUID = 0;
 
                 @Override
@@ -416,7 +416,7 @@ public class TFIDF {
     @Override
     public PDone apply(PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf) {
       return wordToUriAndTfIdf
-          .apply("Format", ParDo.of(new DoFn<KV<String, KV<URI, Double>>, String>() {
+          .apply("Format", ParDo.of(new OldDoFn<KV<String, KV<URI, Double>>, String>() {
             private static final long serialVersionUID = 0;
 
             @Override

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
index c54229d..080cdc9 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.MapElements;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
@@ -38,7 +38,7 @@ import org.apache.beam.sdk.values.PCollection;
 
 public class WordCount {
 
-  public static class ExtractWordsFn extends DoFn<String, String> {
+  public static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
index c0ff85d..068404a 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
@@ -29,7 +29,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Filter;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -92,7 +92,7 @@ public class AutoComplete {
 
         // Map the KV outputs of Count into our own CompletionCandiate class.
         .apply("CreateCompletionCandidates", ParDo.of(
-            new DoFn<KV<String, Long>, CompletionCandidate>() {
+            new OldDoFn<KV<String, Long>, CompletionCandidate>() {
               private static final long serialVersionUID = 0;
 
               @Override
@@ -182,7 +182,7 @@ public class AutoComplete {
     }
 
     private static class FlattenTops
-        extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+        extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
       private static final long serialVersionUID = 0;
 
       @Override
@@ -236,10 +236,10 @@ public class AutoComplete {
   }
 
   /**
-   * A DoFn that keys each candidate by all its prefixes.
+   * A OldDoFn that keys each candidate by all its prefixes.
    */
   private static class AllPrefixes
-      extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+      extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
     private static final long serialVersionUID = 0;
 
     private final int minPrefix;
@@ -314,7 +314,7 @@ public class AutoComplete {
     }
   }
 
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
             createAggregator("emptyLines", new Sum.SumLongFn());
 
@@ -340,8 +340,8 @@ public class AutoComplete {
    * Takes as input a the top candidates per prefix, and emits an entity
    * suitable for writing to Datastore.
    */
-  static class FormatForPerTaskLocalFile extends DoFn<KV<String, List<CompletionCandidate>>, String>
-          implements DoFn.RequiresWindowAccess{
+  static class FormatForPerTaskLocalFile extends OldDoFn<KV<String, List<CompletionCandidate>>, String>
+          implements OldDoFn.RequiresWindowAccess{
 
     private static final long serialVersionUID = 0;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
index f456b27..7d7c0c7 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -76,7 +76,7 @@ public class JoinExamples {
     // country code 'key' -> string of <event info>, <country name>
     PCollection<KV<String, String>> finalResultCollection =
         kvpCollection.apply("Process", ParDo.of(
-            new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
+            new OldDoFn<KV<String, CoGbkResult>, KV<String, String>>() {
               private static final long serialVersionUID = 0;
 
               @Override
@@ -98,7 +98,7 @@ public class JoinExamples {
             }));
 
     return finalResultCollection
-        .apply("Format", ParDo.of(new DoFn<KV<String, String>, String>() {
+        .apply("Format", ParDo.of(new OldDoFn<KV<String, String>, String>() {
           private static final long serialVersionUID = 0;
 
           @Override
@@ -110,7 +110,7 @@ public class JoinExamples {
         }));
   }
 
-  static class ExtractEventDataFn extends DoFn<String, KV<String, String>> {
+  static class ExtractEventDataFn extends OldDoFn<String, KV<String, String>> {
     private static final long serialVersionUID = 0;
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
index 8756abe..395b409 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -326,7 +326,7 @@ public class KafkaIOExamples {
    * Print contents to stdout
    * @param <T> type of the input
    */
-  private static class PrintFn<T> extends DoFn<T, T> {
+  private static class PrintFn<T> extends OldDoFn<T, T> {
 
     @Override
     public void processElement(ProcessContext c) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
index 4e81420..8c31783 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -49,7 +49,7 @@ public class KafkaWindowedWordCountExample {
   static final String GROUP_ID = "myGroup";  // Default groupId
   static final String ZOOKEEPER = "localhost:2181";  // Default zookeeper to connect to for Kafka
 
-  public static class ExtractWordsFn extends DoFn<String, String> {
+  public static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 
@@ -71,7 +71,7 @@ public class KafkaWindowedWordCountExample {
     }
   }
 
-  public static class FormatAsStringFn extends DoFn<KV<String, Long>, String> {
+  public static class FormatAsStringFn extends OldDoFn<KV<String, Long>, String> {
     @Override
     public void processElement(ProcessContext c) {
       String row = c.element().getKey() + " - " + c.element().getValue() + " @ " + c.timestamp().toString();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
index 1b532a7..d149e4e 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -59,7 +59,7 @@ public class WindowedWordCount {
   static final long WINDOW_SIZE = 10;  // Default window duration in seconds
   static final long SLIDE_SIZE = 5;  // Default window slide in seconds
 
-  static class FormatAsStringFn extends DoFn<KV<String, Long>, String> {
+  static class FormatAsStringFn extends OldDoFn<KV<String, Long>, String> {
     @Override
     public void processElement(ProcessContext c) {
       String row = c.element().getKey() + " - " + c.element().getValue() + " @ " + c.timestamp().toString();
@@ -67,7 +67,7 @@ public class WindowedWordCount {
     }
   }
 
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
index 0bba0d0..01a3ab2 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
@@ -39,7 +39,7 @@ import org.apache.beam.sdk.io.BoundedSource;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -391,7 +391,7 @@ class FlinkBatchTransformTranslators {
           inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));
 
       // construct a map from side input to WindowingStrategy so that
-      // the DoFn runner can map main-input windows to side input windows
+      // the OldDoFn runner can map main-input windows to side input windows
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
       for (PCollectionView<?> sideInput: transform.getSideInputs()) {
         sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
@@ -494,7 +494,7 @@ class FlinkBatchTransformTranslators {
       DataSet<WindowedValue<InputT>> inputDataSet =
           context.getInputDataSet(context.getInput(transform));
 
-      final DoFn<InputT, OutputT> doFn = transform.getFn();
+      final OldDoFn<InputT, OutputT> doFn = transform.getFn();
 
       TypeInformation<WindowedValue<OutputT>> typeInformation =
           context.getTypeInfo(context.getOutput(transform));
@@ -502,7 +502,7 @@ class FlinkBatchTransformTranslators {
       List<PCollectionView<?>> sideInputs = transform.getSideInputs();
 
       // construct a map from side input to WindowingStrategy so that
-      // the DoFn runner can map main-input windows to side input windows
+      // the OldDoFn runner can map main-input windows to side input windows
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
       for (PCollectionView<?> sideInput: sideInputs) {
         sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
@@ -539,7 +539,7 @@ class FlinkBatchTransformTranslators {
       DataSet<WindowedValue<InputT>> inputDataSet =
           context.getInputDataSet(context.getInput(transform));
 
-      final DoFn<InputT, OutputT> doFn = transform.getFn();
+      final OldDoFn<InputT, OutputT> doFn = transform.getFn();
 
       Map<TupleTag<?>, PCollection<?>> outputs = context.getOutput(transform).getAll();
 
@@ -578,7 +578,7 @@ class FlinkBatchTransformTranslators {
       List<PCollectionView<?>> sideInputs = transform.getSideInputs();
 
       // construct a map from side input to WindowingStrategy so that
-      // the DoFn runner can map main-input windows to side input windows
+      // the OldDoFn runner can map main-input windows to side input windows
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
       for (PCollectionView<?> sideInput: sideInputs) {
         sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
index fa6b387..5b55d42 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
@@ -35,11 +35,10 @@ import org.apache.beam.sdk.io.BoundedSource;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.io.Sink;
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.io.Write;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -71,8 +70,6 @@ import org.apache.flink.streaming.api.datastream.DataStreamSink;
 import org.apache.flink.streaming.api.datastream.KeyedStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
-import org.apache.flink.streaming.api.functions.IngestionTimeExtractor;
-import org.apache.flink.streaming.api.watermark.Watermark;
 import org.apache.flink.util.Collector;
 import org.joda.time.Instant;
 import org.slf4j.Logger;
@@ -346,8 +343,8 @@ public class FlinkStreamingTransformTranslators {
       context.setOutputDataStream(context.getOutput(transform), windowedStream);
     }
 
-    private static <T, W extends BoundedWindow> DoFn<T, T> createWindowAssigner(final WindowFn<T, W> windowFn) {
-      return new DoFn<T, T>() {
+    private static <T, W extends BoundedWindow> OldDoFn<T, T> createWindowAssigner(final WindowFn<T, W> windowFn) {
+      return new OldDoFn<T, T>() {
 
         @Override
         public void processElement(final ProcessContext c) throws Exception {



[49/51] [abbrv] incubator-beam git commit: This closes #795

Posted by ke...@apache.org.
This closes #795


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/c584b37b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/c584b37b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/c584b37b

Branch: refs/heads/python-sdk
Commit: c584b37b8ac4e863bad83a766b6871ccc7135334
Parents: 2b5c6bc acf71d3
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Aug 5 10:40:17 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Fri Aug 5 10:40:17 2016 -0700

----------------------------------------------------------------------
 .../beam/runners/direct/EvaluationContext.java  | 14 ++++++---
 .../direct/ExecutorServiceParallelExecutor.java |  5 ++--
 .../beam/runners/direct/TransformExecutor.java  |  2 +-
 .../beam/runners/direct/DirectRunnerTest.java   | 31 ++++++++++++++++++++
 4 files changed, 44 insertions(+), 8 deletions(-)
----------------------------------------------------------------------



[09/51] [abbrv] incubator-beam git commit: Closes #758

Posted by ke...@apache.org.
Closes #758


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/9a329aad
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/9a329aad
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/9a329aad

Branch: refs/heads/python-sdk
Commit: 9a329aada82d3ca7f619b88eddea04bdd329d992
Parents: 388816a 3466a0e
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 18:25:53 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../beam/examples/DebuggingWordCount.java       |   2 +-
 .../apache/beam/examples/MinimalWordCount.java  |   2 +-
 .../apache/beam/examples/WindowedWordCount.java |   4 +-
 .../org/apache/beam/examples/WordCount.java     |   2 +-
 .../examples/common/PubsubFileInjector.java     |   6 +-
 .../beam/examples/complete/AutoComplete.java    |  14 +-
 .../examples/complete/StreamingWordExtract.java |  12 +-
 .../apache/beam/examples/complete/TfIdf.java    |  16 +-
 .../examples/complete/TopWikipediaSessions.java |  12 +-
 .../examples/complete/TrafficMaxLaneFlow.java   |  10 +-
 .../beam/examples/complete/TrafficRoutes.java   |  12 +-
 .../examples/cookbook/BigQueryTornadoes.java    |   6 +-
 .../cookbook/CombinePerKeyExamples.java         |   6 +-
 .../examples/cookbook/DatastoreWordCount.java   |  11 +-
 .../beam/examples/cookbook/FilterExamples.java  |  12 +-
 .../beam/examples/cookbook/JoinExamples.java    |  10 +-
 .../examples/cookbook/MaxPerKeyExamples.java    |   6 +-
 .../beam/examples/cookbook/TriggerExample.java  |  12 +-
 .../org/apache/beam/examples/WordCountTest.java |   2 +-
 .../examples/complete/AutoCompleteTest.java     |   4 +-
 .../examples/cookbook/TriggerExampleTest.java   |   4 +-
 .../beam/examples/complete/game/GameStats.java  |  10 +-
 .../beam/examples/complete/game/UserScore.java  |   4 +-
 .../complete/game/utils/WriteToBigQuery.java    |  12 +-
 .../game/utils/WriteWindowedToBigQuery.java     |   8 +-
 .../examples/complete/game/UserScoreTest.java   |   2 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |  12 +-
 .../core/UnboundedReadFromBoundedSource.java    |   2 +-
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |  10 +-
 .../org/apache/beam/sdk/util/DoFnRunner.java    |  21 +-
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  54 +-
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  24 +-
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   6 +-
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |   4 +-
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |   6 +-
 .../sdk/util/LateDataDroppingDoFnRunner.java    |   4 +-
 .../apache/beam/sdk/util/PaneInfoTracker.java   |   1 -
 .../apache/beam/sdk/util/ReduceFnRunner.java    |   4 +-
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |  12 +-
 .../org/apache/beam/sdk/util/WatermarkHold.java |   1 -
 .../beam/sdk/util/ReduceFnRunnerTest.java       |   1 +
 .../apache/beam/sdk/util/ReduceFnTester.java    |   1 +
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   6 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   4 +-
 .../ImmutabilityCheckingBundleFactory.java      |   4 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   4 +-
 .../direct/ParDoMultiEvaluatorFactory.java      |  11 +-
 .../direct/ParDoSingleEvaluatorFactory.java     |  11 +-
 .../direct/TransformEvaluatorFactory.java       |   6 +-
 .../direct/WriteWithShardingFactory.java        |   4 +-
 .../ConsumerTrackingPipelineVisitorTest.java    |  22 +-
 .../beam/runners/direct/DirectRunnerTest.java   |  24 +-
 .../ImmutabilityCheckingBundleFactoryTest.java  |   6 +-
 .../ImmutabilityEnforcementFactoryTest.java     |   6 +-
 .../direct/KeyedPValueTrackingVisitorTest.java  |   6 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   6 +-
 .../direct/ParDoMultiEvaluatorFactoryTest.java  |  10 +-
 .../direct/ParDoSingleEvaluatorFactoryTest.java |  10 +-
 .../runners/direct/WatermarkManagerTest.java    |   7 +-
 .../beam/runners/flink/examples/TFIDF.java      |  16 +-
 .../beam/runners/flink/examples/WordCount.java  |   4 +-
 .../flink/examples/streaming/AutoComplete.java  |  16 +-
 .../flink/examples/streaming/JoinExamples.java  |   8 +-
 .../examples/streaming/KafkaIOExamples.java     |   4 +-
 .../KafkaWindowedWordCountExample.java          |   6 +-
 .../examples/streaming/WindowedWordCount.java   |   6 +-
 .../FlinkBatchTransformTranslators.java         |  12 +-
 .../FlinkStreamingTransformTranslators.java     |   9 +-
 .../functions/FlinkDoFnFunction.java            |  10 +-
 .../FlinkMergingNonShuffleReduceFunction.java   |   8 +-
 .../functions/FlinkMultiOutputDoFnFunction.java |  10 +-
 .../FlinkMultiOutputProcessContext.java         |   6 +-
 .../functions/FlinkNoElementAssignContext.java  |   8 +-
 .../functions/FlinkPartialReduceFunction.java   |   8 +-
 .../functions/FlinkProcessContext.java          |  16 +-
 .../functions/FlinkReduceFunction.java          |   8 +-
 .../streaming/FlinkAbstractParDoWrapper.java    |  18 +-
 .../FlinkGroupAlsoByWindowWrapper.java          |  10 +-
 .../streaming/FlinkParDoBoundMultiWrapper.java  |   4 +-
 .../streaming/FlinkParDoBoundWrapper.java       |   4 +-
 .../state/AbstractFlinkTimerInternals.java      |   4 +-
 .../beam/runners/flink/PipelineOptionsTest.java |   6 +-
 .../beam/runners/flink/ReadSourceITCase.java    |   4 +-
 .../flink/ReadSourceStreamingITCase.java        |   4 +-
 .../flink/streaming/GroupByNullKeyTest.java     |   8 +-
 .../streaming/TopWikipediaSessionsITCase.java   |   6 +-
 .../dataflow/DataflowPipelineTranslator.java    |   6 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  87 ++-
 .../dataflow/internal/AssignWindows.java        |   6 +-
 .../beam/runners/dataflow/util/DoFnInfo.java    |  16 +-
 .../DataflowPipelineTranslatorTest.java         |  10 +-
 .../beam/runners/spark/examples/WordCount.java  |   4 +-
 .../runners/spark/translation/DoFnFunction.java |   8 +-
 .../spark/translation/MultiDoFnFunction.java    |   8 +-
 .../spark/translation/SparkProcessContext.java  |  18 +-
 .../spark/translation/TransformTranslator.java  |   7 +-
 .../streaming/StreamingTransformTranslator.java |   4 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  12 +-
 .../spark/translation/CombinePerKeyTest.java    |   4 +-
 .../spark/translation/DoFnOutputTest.java       |   4 +-
 .../translation/MultiOutputWordCountTest.java   |   8 +-
 .../spark/translation/SerializationTest.java    |  10 +-
 .../spark/translation/SideEffectsTest.java      |   4 +-
 .../streaming/KafkaStreamingTest.java           |   4 +-
 .../org/apache/beam/sdk/coders/AvroCoder.java   |   1 -
 .../apache/beam/sdk/coders/DurationCoder.java   |   1 -
 .../apache/beam/sdk/coders/InstantCoder.java    |   1 -
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |   6 +-
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |   8 +-
 .../beam/sdk/io/PubsubUnboundedSource.java      |   4 +-
 .../java/org/apache/beam/sdk/io/Source.java     |   2 +-
 .../main/java/org/apache/beam/sdk/io/Write.java |  21 +-
 .../org/apache/beam/sdk/options/GcpOptions.java |   1 -
 .../beam/sdk/options/PipelineOptions.java       |   8 +-
 .../sdk/options/PipelineOptionsFactory.java     |   1 -
 .../sdk/options/PipelineOptionsReflector.java   |   1 +
 .../beam/sdk/runners/AggregatorValues.java      |   4 +-
 .../org/apache/beam/sdk/testing/PAssert.java    |  24 +-
 .../beam/sdk/testing/SerializableMatchers.java  |   1 -
 .../apache/beam/sdk/testing/TestPipeline.java   |   1 -
 .../beam/sdk/testing/TestPipelineOptions.java   |   1 +
 .../apache/beam/sdk/transforms/Aggregator.java  |  14 +-
 .../sdk/transforms/AggregatorRetriever.java     |   6 +-
 .../org/apache/beam/sdk/transforms/Combine.java |  14 +-
 .../apache/beam/sdk/transforms/CombineFns.java  |   4 +-
 .../org/apache/beam/sdk/transforms/Count.java   |   2 +-
 .../org/apache/beam/sdk/transforms/Create.java  |   2 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    | 418 +++++---------
 .../beam/sdk/transforms/DoFnReflector.java      | 116 ++--
 .../apache/beam/sdk/transforms/DoFnTester.java  |  88 +--
 .../beam/sdk/transforms/DoFnWithContext.java    | 429 --------------
 .../org/apache/beam/sdk/transforms/Filter.java  |   2 +-
 .../beam/sdk/transforms/FlatMapElements.java    |   2 +-
 .../org/apache/beam/sdk/transforms/Flatten.java |   2 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |   2 +-
 .../transforms/IntraBundleParallelization.java  |  40 +-
 .../org/apache/beam/sdk/transforms/Keys.java    |   2 +-
 .../org/apache/beam/sdk/transforms/KvSwap.java  |   2 +-
 .../apache/beam/sdk/transforms/MapElements.java |   2 +-
 .../org/apache/beam/sdk/transforms/OldDoFn.java | 565 +++++++++++++++++++
 .../apache/beam/sdk/transforms/PTransform.java  |   2 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   | 219 +++----
 .../apache/beam/sdk/transforms/Partition.java   |   2 +-
 .../beam/sdk/transforms/RemoveDuplicates.java   |   2 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |   4 +-
 .../beam/sdk/transforms/SimpleFunction.java     |   6 +-
 .../org/apache/beam/sdk/transforms/Values.java  |   2 +-
 .../org/apache/beam/sdk/transforms/View.java    |   8 +-
 .../apache/beam/sdk/transforms/WithKeys.java    |   2 +-
 .../beam/sdk/transforms/WithTimestamps.java     |   4 +-
 .../sdk/transforms/display/DisplayData.java     |   1 -
 .../beam/sdk/transforms/join/CoGbkResult.java   |   1 -
 .../beam/sdk/transforms/join/CoGroupByKey.java  |  14 +-
 .../sdk/transforms/windowing/AfterEach.java     |   1 +
 .../windowing/AfterProcessingTime.java          |   1 +
 .../transforms/windowing/IntervalWindow.java    |   1 -
 .../beam/sdk/transforms/windowing/Never.java    |   1 +
 .../beam/sdk/transforms/windowing/PaneInfo.java |  10 +-
 .../beam/sdk/transforms/windowing/Window.java   |   4 +-
 .../beam/sdk/util/BaseExecutionContext.java     |   4 +-
 .../apache/beam/sdk/util/BucketingFunction.java |   1 +
 .../beam/sdk/util/CombineContextFactory.java    |   6 +-
 .../apache/beam/sdk/util/ExecutionContext.java  |   8 +-
 .../apache/beam/sdk/util/MovingFunction.java    |   1 +
 .../beam/sdk/util/PerKeyCombineFnRunner.java    |  44 +-
 .../beam/sdk/util/PerKeyCombineFnRunners.java   |  30 +-
 .../org/apache/beam/sdk/util/PubsubClient.java  |   1 +
 .../apache/beam/sdk/util/PubsubTestClient.java  |   1 +
 .../sdk/util/ReifyTimestampAndWindowsDoFn.java  |   6 +-
 .../org/apache/beam/sdk/util/Reshuffle.java     |   4 +-
 .../apache/beam/sdk/util/SerializableUtils.java |   2 +-
 .../org/apache/beam/sdk/util/StringUtils.java   |   2 +-
 .../beam/sdk/util/SystemDoFnInternal.java       |   6 +-
 .../apache/beam/sdk/util/TimerInternals.java    |   1 -
 .../apache/beam/sdk/util/ValueWithRecordId.java |   6 +-
 .../org/apache/beam/sdk/util/WindowedValue.java |   1 -
 .../beam/sdk/util/WindowingInternals.java       |   4 +-
 .../beam/sdk/util/common/ReflectHelpers.java    |   1 +
 .../beam/sdk/values/TimestampedValue.java       |   1 -
 .../java/org/apache/beam/sdk/PipelineTest.java  |   6 +-
 .../apache/beam/sdk/coders/AvroCoderTest.java   |   4 +-
 .../beam/sdk/coders/CoderRegistryTest.java      |   6 +-
 .../beam/sdk/coders/SerializableCoderTest.java  |   6 +-
 .../org/apache/beam/sdk/io/AvroSourceTest.java  |   1 +
 .../io/BoundedReadFromUnboundedSourceTest.java  |   1 +
 .../beam/sdk/io/CompressedSourceTest.java       |   1 +
 .../apache/beam/sdk/io/CountingInputTest.java   |   5 +-
 .../apache/beam/sdk/io/CountingSourceTest.java  |   4 +-
 .../beam/sdk/io/OffsetBasedSourceTest.java      |   1 +
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |   4 +-
 .../java/org/apache/beam/sdk/io/ReadTest.java   |   1 +
 .../java/org/apache/beam/sdk/io/TextIOTest.java |   1 +
 .../java/org/apache/beam/sdk/io/WriteTest.java  |   7 +-
 .../org/apache/beam/sdk/io/XmlSinkTest.java     |   1 +
 .../apache/beam/sdk/options/GcpOptionsTest.java |   1 +
 .../sdk/options/GoogleApiDebugOptionsTest.java  |   1 -
 .../sdk/options/PipelineOptionsFactoryTest.java |   1 -
 .../beam/sdk/options/PipelineOptionsTest.java   |   1 -
 .../sdk/options/ProxyInvocationHandlerTest.java |   2 +-
 .../AggregatorPipelineExtractorTest.java        |   6 +-
 .../apache/beam/sdk/testing/PAssertTest.java    |   1 -
 .../beam/sdk/testing/TestPipelineTest.java      |   1 -
 .../transforms/ApproximateQuantilesTest.java    |   1 +
 .../sdk/transforms/ApproximateUniqueTest.java   |   5 +-
 .../beam/sdk/transforms/CombineFnsTest.java     |   2 +-
 .../apache/beam/sdk/transforms/CombineTest.java |  12 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |   2 +-
 .../beam/sdk/transforms/DoFnContextTest.java    |  69 ---
 .../DoFnDelegatingAggregatorTest.java           |  16 +-
 .../beam/sdk/transforms/DoFnReflectorTest.java  |  88 +--
 .../apache/beam/sdk/transforms/DoFnTest.java    | 131 +++--
 .../beam/sdk/transforms/DoFnTesterTest.java     |  10 +-
 .../sdk/transforms/DoFnWithContextTest.java     | 237 --------
 .../apache/beam/sdk/transforms/FlattenTest.java |   4 +-
 .../beam/sdk/transforms/GroupByKeyTest.java     |   6 +-
 .../IntraBundleParallelizationTest.java         |  23 +-
 .../beam/sdk/transforms/MapElementsTest.java    |   1 +
 .../org/apache/beam/sdk/transforms/MaxTest.java |   1 +
 .../org/apache/beam/sdk/transforms/MinTest.java |   2 +
 .../apache/beam/sdk/transforms/NoOpDoFn.java    | 144 -----
 .../apache/beam/sdk/transforms/NoOpOldDoFn.java | 144 +++++
 .../beam/sdk/transforms/OldDoFnContextTest.java |  69 +++
 .../apache/beam/sdk/transforms/OldDoFnTest.java | 242 ++++++++
 .../apache/beam/sdk/transforms/ParDoTest.java   | 108 ++--
 .../beam/sdk/transforms/PartitionTest.java      |   1 +
 .../apache/beam/sdk/transforms/SampleTest.java  |   1 +
 .../org/apache/beam/sdk/transforms/TopTest.java |   1 +
 .../apache/beam/sdk/transforms/ViewTest.java    | 398 ++++++-------
 .../beam/sdk/transforms/WithTimestampsTest.java |   8 +-
 .../display/DisplayDataEvaluatorTest.java       |   6 +-
 .../display/DisplayDataMatchersTest.java        |   1 +
 .../sdk/transforms/display/DisplayDataTest.java |   6 +-
 .../dofnreflector/DoFnReflectorTestHelper.java  |  26 +-
 .../sdk/transforms/join/CoGroupByKeyTest.java   |  18 +-
 .../sdk/transforms/windowing/NeverTest.java     |   1 +
 .../sdk/transforms/windowing/WindowTest.java    |   6 +-
 .../sdk/transforms/windowing/WindowingTest.java |  10 +-
 .../beam/sdk/util/BucketingFunctionTest.java    |   4 +-
 .../beam/sdk/util/MovingFunctionTest.java       |   4 +-
 .../beam/sdk/util/SerializableUtilsTest.java    |   1 -
 .../apache/beam/sdk/util/SerializerTest.java    |   1 -
 .../apache/beam/sdk/util/StringUtilsTest.java   |  16 +-
 .../org/apache/beam/sdk/util/TriggerTester.java |   1 +
 .../beam/sdk/util/common/CounterTest.java       |   1 +
 .../beam/sdk/values/PCollectionTupleTest.java   |   4 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   6 +-
 .../beam/sdk/extensions/joinlibrary/Join.java   |   8 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |  18 +-
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    |   4 +-
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      |  13 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |   6 +-
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |   4 +-
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |   6 +-
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |   4 +-
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |   8 +-
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   |   7 +-
 .../sdk/transforms/WithTimestampsJava8Test.java |   4 +-
 .../src/main/java/DebuggingWordCount.java       |   4 +-
 .../src/main/java/MinimalWordCount.java         |   6 +-
 .../src/main/java/WindowedWordCount.java        |   6 +-
 .../src/main/java/WordCount.java                |   6 +-
 .../main/java/common/PubsubFileInjector.java    |   4 +-
 .../src/main/java/StarterPipeline.java          |   6 +-
 .../src/main/java/it/pkg/StarterPipeline.java   |   6 +-
 .../transforms/DoFnReflectorBenchmark.java      |  49 +-
 265 files changed, 2641 insertions(+), 2596 deletions(-)
----------------------------------------------------------------------



[28/51] [abbrv] incubator-beam git commit: Deprecate OldDoFn

Posted by ke...@apache.org.
Deprecate OldDoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/fb6d2c2e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/fb6d2c2e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/fb6d2c2e

Branch: refs/heads/python-sdk
Commit: fb6d2c2e3b97ca67450ea0ca84bbb40667b48a92
Parents: e73d163
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 10:02:22 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:47:31 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/fb6d2c2e/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
index f640442..443599a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
@@ -72,7 +72,9 @@ import java.util.UUID;
  *
  * @param <InputT> the type of the (main) input elements
  * @param <OutputT> the type of the (main) output elements
+ * @deprecated Uses of {@link OldDoFn} should be replaced by the new {@link DoFn}.
  */
+@Deprecated
 public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDisplayData {
 
   /**


[23/51] [abbrv] incubator-beam git commit: Closes #707

Posted by ke...@apache.org.
Closes #707


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/34d50127
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/34d50127
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/34d50127

Branch: refs/heads/python-sdk
Commit: 34d501278344e90115c4baea3af6301c37f58972
Parents: 595d2d4 8db6114
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 23:41:22 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 23:41:22 2016 -0700

----------------------------------------------------------------------
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 585 ++++++++++++++-----
 .../sdk/io/gcp/bigquery/BigQueryServices.java   |   7 +
 .../io/gcp/bigquery/BigQueryServicesImpl.java   |  51 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 213 ++++++-
 4 files changed, 693 insertions(+), 163 deletions(-)
----------------------------------------------------------------------



[17/51] [abbrv] incubator-beam git commit: Closes #783

Posted by ke...@apache.org.
Closes #783


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/c314e670
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/c314e670
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/c314e670

Branch: refs/heads/python-sdk
Commit: c314e670e0113cddc40af680f5ce8a5134d61e9a
Parents: 9a329aa 14c6d99
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 22:28:17 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 22:28:17 2016 -0700

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[29/51] [abbrv] incubator-beam git commit: add back unnecessary changes

Posted by ke...@apache.org.
add back unnecessary changes


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/8942c0a9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/8942c0a9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/8942c0a9

Branch: refs/heads/python-sdk
Commit: 8942c0a90eb12a1f1f04c2fc47e9cbe1ed2be5b8
Parents: d7a02a1
Author: Mark Liu <ma...@markliu0.mtv.corp.google.com>
Authored: Thu Aug 4 10:26:04 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:50:08 2016 -0700

----------------------------------------------------------------------
 examples/java/pom.xml | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8942c0a9/examples/java/pom.xml
----------------------------------------------------------------------
diff --git a/examples/java/pom.xml b/examples/java/pom.xml
index dca2318..10e7503 100644
--- a/examples/java/pom.xml
+++ b/examples/java/pom.xml
@@ -275,6 +275,7 @@
       <artifactId>beam-runners-direct-java</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
+      <optional>true</optional>
     </dependency>
 
     <dependency>


[43/51] [abbrv] incubator-beam git commit: This closes #782

Posted by ke...@apache.org.
This closes #782


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/8daf518b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/8daf518b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/8daf518b

Branch: refs/heads/python-sdk
Commit: 8daf518bccfe425082c7d0b3f31f3623ff67e000
Parents: fcf6b1d 47341e1
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 20:10:55 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 20:10:55 2016 -0700

----------------------------------------------------------------------
 .../beam/runners/dataflow/DataflowRunner.java   |  3 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   | 14 +++----
 .../apache/beam/sdk/io/PubsubUnboundedSink.java | 17 ++++-----
 .../beam/sdk/io/PubsubUnboundedSource.java      |  7 ++--
 .../beam/sdk/options/PipelineOptions.java       |  9 ++---
 .../org/apache/beam/sdk/testing/PAssert.java    | 39 ++++++++++----------
 .../org/apache/beam/sdk/transforms/Count.java   |  4 +-
 .../org/apache/beam/sdk/transforms/Create.java  |  4 +-
 .../beam/sdk/transforms/DoFnReflector.java      |  6 +++
 .../beam/sdk/transforms/FlatMapElements.java    |  4 +-
 .../org/apache/beam/sdk/transforms/Flatten.java |  4 +-
 .../org/apache/beam/sdk/transforms/Keys.java    |  4 +-
 .../org/apache/beam/sdk/transforms/KvSwap.java  |  4 +-
 .../apache/beam/sdk/transforms/MapElements.java |  4 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   |  2 +-
 .../apache/beam/sdk/transforms/Partition.java   |  4 +-
 .../beam/sdk/transforms/RemoveDuplicates.java   |  4 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |  6 +--
 .../org/apache/beam/sdk/transforms/Values.java  |  4 +-
 .../org/apache/beam/sdk/transforms/View.java    |  8 ++--
 .../apache/beam/sdk/transforms/WithKeys.java    |  4 +-
 .../beam/sdk/transforms/WithTimestamps.java     |  6 +--
 .../beam/sdk/transforms/join/CoGroupByKey.java  | 16 ++++----
 .../java/org/apache/beam/sdk/PipelineTest.java  |  8 ++--
 .../apache/beam/sdk/coders/AvroCoderTest.java   |  6 +--
 .../beam/sdk/coders/CoderRegistryTest.java      | 10 ++---
 .../beam/sdk/coders/SerializableCoderTest.java  | 10 ++---
 .../apache/beam/sdk/io/CountingInputTest.java   |  6 +--
 .../apache/beam/sdk/io/CountingSourceTest.java  |  6 +--
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |  6 +--
 .../sdk/transforms/ApproximateUniqueTest.java   |  6 +--
 .../beam/sdk/transforms/CombineFnsTest.java     |  4 +-
 .../apache/beam/sdk/transforms/CombineTest.java | 18 ++++-----
 .../apache/beam/sdk/transforms/CreateTest.java  |  4 +-
 .../apache/beam/sdk/transforms/FlattenTest.java |  8 ++--
 .../beam/sdk/transforms/GroupByKeyTest.java     |  8 ++--
 .../beam/sdk/transforms/WithTimestampsTest.java | 12 +++---
 .../display/DisplayDataEvaluatorTest.java       | 10 ++---
 .../sdk/transforms/display/DisplayDataTest.java |  6 +--
 .../sdk/transforms/join/CoGroupByKeyTest.java   | 34 ++++++++---------
 .../sdk/transforms/windowing/WindowTest.java    | 10 ++---
 .../sdk/transforms/windowing/WindowingTest.java | 23 ++++++------
 .../beam/sdk/values/PCollectionTupleTest.java   |  6 +--
 .../apache/beam/sdk/values/TypedPValueTest.java | 10 ++---
 .../beam/sdk/extensions/joinlibrary/Join.java   | 14 +++----
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 36 +++++++++---------
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    | 12 +++---
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      | 18 ++++-----
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 10 ++---
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |  6 +--
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |  9 ++---
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  | 10 ++---
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   | 19 +++++-----
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   | 10 ++---
 54 files changed, 265 insertions(+), 267 deletions(-)
----------------------------------------------------------------------



[14/51] [abbrv] incubator-beam git commit: Port MinimalWordCount example from OldDoFn to DoFn

Posted by ke...@apache.org.
Port MinimalWordCount example from OldDoFn to DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/4ceec0e8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/4ceec0e8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/4ceec0e8

Branch: refs/heads/python-sdk
Commit: 4ceec0e86f1c4e885168957299dbe81c61fbc7e7
Parents: 64481d0
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:28:42 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/examples/MinimalWordCount.java     | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ceec0e8/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
index ab0bb6d..df725e3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.values.KV;
@@ -89,12 +89,11 @@ public class MinimalWordCount {
     // the input text (a set of Shakespeare's texts).
     p.apply(TextIO.Read.from("gs://dataflow-samples/shakespeare/*"))
      // Concept #2: Apply a ParDo transform to our PCollection of text lines. This ParDo invokes a
-     // OldDoFn (defined in-line) on each element that tokenizes the text line into individua
-     // words.
+     // DoFn (defined in-line) on each element that tokenizes the text line into individual words.
      // The ParDo returns a PCollection<String>, where each element is an individual word in
      // Shakespeare's collected texts.
-     .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
-                       @Override
+     .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+                       @ProcessElement
                        public void processElement(ProcessContext c) {
                          for (String word : c.element().split("[^a-zA-Z']+")) {
                            if (!word.isEmpty()) {


[31/51] [abbrv] incubator-beam git commit: Closes #767

Posted by ke...@apache.org.
Closes #767


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/734bfb9a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/734bfb9a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/734bfb9a

Branch: refs/heads/python-sdk
Commit: 734bfb9afc29a4d760cb8d996638021f0d55b8c6
Parents: be2758c 8942c0a
Author: Dan Halperin <dh...@google.com>
Authored: Thu Aug 4 11:50:09 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:50:09 2016 -0700

----------------------------------------------------------------------
 examples/java/pom.xml | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------



[37/51] [abbrv] incubator-beam git commit: Port easy Java SDK tests to new DoFn

Posted by ke...@apache.org.
Port easy Java SDK tests to new DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/1959ddbe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/1959ddbe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/1959ddbe

Branch: refs/heads/python-sdk
Commit: 1959ddbedb2ad61824bf28e1e9139cc677a2aaf5
Parents: f5011e5
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 20:15:12 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/PipelineTest.java  |  8 ++---
 .../apache/beam/sdk/coders/AvroCoderTest.java   |  6 ++--
 .../beam/sdk/coders/CoderRegistryTest.java      | 10 +++---
 .../beam/sdk/coders/SerializableCoderTest.java  | 10 +++---
 .../apache/beam/sdk/io/CountingInputTest.java   |  6 ++--
 .../apache/beam/sdk/io/CountingSourceTest.java  |  6 ++--
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |  6 ++--
 .../sdk/transforms/ApproximateUniqueTest.java   |  6 ++--
 .../beam/sdk/transforms/CombineFnsTest.java     |  4 +--
 .../apache/beam/sdk/transforms/CombineTest.java | 18 +++++------
 .../apache/beam/sdk/transforms/CreateTest.java  |  4 +--
 .../apache/beam/sdk/transforms/FlattenTest.java |  8 ++---
 .../beam/sdk/transforms/GroupByKeyTest.java     |  8 ++---
 .../beam/sdk/transforms/WithTimestampsTest.java | 12 +++----
 .../display/DisplayDataEvaluatorTest.java       | 10 +++---
 .../sdk/transforms/display/DisplayDataTest.java |  6 ++--
 .../sdk/transforms/join/CoGroupByKeyTest.java   | 34 ++++++++++----------
 .../sdk/transforms/windowing/WindowTest.java    | 10 +++---
 .../sdk/transforms/windowing/WindowingTest.java | 23 ++++++-------
 .../beam/sdk/values/PCollectionTupleTest.java   |  6 ++--
 .../apache/beam/sdk/values/TypedPValueTest.java | 10 +++---
 21 files changed, 106 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
index 5137031..8b86499 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.UserCodeException;
@@ -146,9 +146,9 @@ public class PipelineTest {
 
   private static PTransform<PCollection<? extends String>, PCollection<String>> addSuffix(
       final String suffix) {
-    return ParDo.of(new OldDoFn<String, String>() {
-      @Override
-      public void processElement(OldDoFn<String, String>.ProcessContext c) {
+    return ParDo.of(new DoFn<String, String>() {
+      @ProcessElement
+      public void processElement(DoFn<String, String>.ProcessContext c) {
         c.output(c.element() + suffix);
       }
     });

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
index 41d0932..3b13e35 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
 import org.apache.beam.sdk.util.SerializableUtils;
@@ -134,8 +134,8 @@ public class AvroCoderTest {
     }
   }
 
-  private static class GetTextFn extends OldDoFn<Pojo, String> {
-    @Override
+  private static class GetTextFn extends DoFn<Pojo, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(c.element().text);
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
index 35ec6c6..da15405 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.coders.protobuf.ProtoCoder;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
@@ -366,8 +366,8 @@ public class CoderRegistryTest {
   private static class PTransformOutputingMySerializableGeneric
   extends PTransform<PCollection<String>, PCollection<KV<String, MySerializableGeneric<String>>>> {
 
-    private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<String>>> {
-      @Override
+    private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<String>>> {
+      @ProcessElement
       public void processElement(ProcessContext c) { }
     }
 
@@ -430,8 +430,8 @@ public class CoderRegistryTest {
       PCollection<String>,
       PCollection<KV<String, MySerializableGeneric<T>>>> {
 
-    private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<T>>> {
-      @Override
+    private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<T>>> {
+      @ProcessElement
       public void processElement(ProcessContext c) { }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
index 3e7fd50..b5465fa 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
 import org.apache.beam.sdk.util.CoderUtils;
@@ -82,15 +82,15 @@ public class SerializableCoderTest implements Serializable {
     }
   }
 
-  static class StringToRecord extends OldDoFn<String, MyRecord> {
-    @Override
+  static class StringToRecord extends DoFn<String, MyRecord> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(new MyRecord(c.element()));
     }
   }
 
-  static class RecordToString extends OldDoFn<MyRecord, String> {
-    @Override
+  static class RecordToString extends DoFn<MyRecord, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(c.element().value);
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
index 95f7454..4ec2c9a 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
@@ -29,9 +29,9 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -120,8 +120,8 @@ public class CountingInputTest {
     assertThat(endTime.isAfter(startTime.plus(expectedRuntimeMillis)), is(true));
   }
 
-  private static class ElementValueDiff extends OldDoFn<Long, Long> {
-    @Override
+  private static class ElementValueDiff extends DoFn<Long, Long> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
index 45f636f..0bd91c1 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
@@ -34,10 +34,10 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -159,8 +159,8 @@ public class CountingSourceTest {
     p.run();
   }
 
-  private static class ElementValueDiff extends OldDoFn<Long, Long> {
-    @Override
+  private static class ElementValueDiff extends DoFn<Long, Long> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
index f8592c9..db03a5c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.testing.CoderProperties;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.PubsubClient;
 import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage;
@@ -58,8 +58,8 @@ public class PubsubUnboundedSinkTest {
   private static final String ID_LABEL = "id";
   private static final int NUM_SHARDS = 10;
 
-  private static class Stamp extends OldDoFn<String, String> {
-    @Override
+  private static class Stamp extends DoFn<String, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.outputWithTimestamp(c.element(), new Instant(TIMESTAMP));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
index 5c8732f..7b6d671 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
@@ -54,7 +54,7 @@ import java.util.List;
  */
 @RunWith(JUnit4.class)
 public class ApproximateUniqueTest implements Serializable {
-  // implements Serializable just to make it easy to use anonymous inner OldDoFn subclasses
+  // implements Serializable just to make it easy to use anonymous inner DoFn subclasses
 
   @Test
   public void testEstimationErrorToSampleSize() {
@@ -223,8 +223,8 @@ public class ApproximateUniqueTest implements Serializable {
             .apply(View.<Long>asSingleton());
 
     PCollection<KV<Long, Long>> approximateAndExact = approximate
-        .apply(ParDo.of(new OldDoFn<Long, KV<Long, Long>>() {
-              @Override
+        .apply(ParDo.of(new DoFn<Long, KV<Long, Long>>() {
+              @ProcessElement
               public void processElement(ProcessContext c) {
                 c.output(KV.of(c.element(), c.sideInput(exact)));
               }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
index d6bf826..95ba1aa 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
@@ -461,7 +461,7 @@ public class  CombineFnsTest {
   }
 
   private static class ExtractResultDoFn
-      extends OldDoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>> {
+      extends DoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>> {
 
     private final TupleTag<Integer> maxIntTag;
     private final TupleTag<UserString> concatStringTag;
@@ -471,7 +471,7 @@ public class  CombineFnsTest {
       this.concatStringTag = concatStringTag;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       UserString userString = c.element().getValue().get(concatStringTag);
       KV<Integer, String> value = KV.of(

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
index cb9928e..6421b3b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
@@ -117,7 +117,7 @@ public class CombineTest implements Serializable {
     1, 1, 2, 3, 5, 8, 13, 21, 34, 55
   };
 
-  @Mock private OldDoFn<?, ?>.ProcessContext processContext;
+  @Mock private DoFn<?, ?>.ProcessContext processContext;
 
   PCollection<KV<String, Integer>> createInput(Pipeline p,
                                                KV<String, Integer>[] table) {
@@ -372,8 +372,8 @@ public class CombineTest implements Serializable {
     pipeline.run();
   }
 
-  private static class FormatPaneInfo extends OldDoFn<Integer, String> {
-    @Override
+  private static class FormatPaneInfo extends DoFn<Integer, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(c.element() + ": " + c.pane().isLast());
     }
@@ -560,8 +560,8 @@ public class CombineTest implements Serializable {
     pipeline.run();
   }
 
-  private static class GetLast extends OldDoFn<Integer, Integer> {
-    @Override
+  private static class GetLast extends DoFn<Integer, Integer> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       if (c.pane().isLast()) {
         c.output(c.element());
@@ -653,8 +653,8 @@ public class CombineTest implements Serializable {
 
     PCollection<Integer> output = pipeline
         .apply("CreateVoidMainInput", Create.of((Void) null))
-        .apply("OutputSideInput", ParDo.of(new OldDoFn<Void, Integer>() {
-                  @Override
+        .apply("OutputSideInput", ParDo.of(new DoFn<Void, Integer>() {
+                  @ProcessElement
                   public void processElement(ProcessContext c) {
                     c.output(c.sideInput(view));
                   }
@@ -1176,8 +1176,8 @@ public class CombineTest implements Serializable {
   }
 
   private static <T> PCollection<T> copy(PCollection<T> pc, final int n) {
-    return pc.apply(ParDo.of(new OldDoFn<T, T>() {
-      @Override
+    return pc.apply(ParDo.of(new DoFn<T, T>() {
+      @ProcessElement
       public void processElement(ProcessContext c) throws Exception {
         for (int i = 0; i < n; i++) {
           c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
index cf65423..9db0136 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
@@ -229,8 +229,8 @@ public class CreateTest {
     p.run();
   }
 
-  private static class PrintTimestamps extends OldDoFn<String, String> {
-    @Override
+  private static class PrintTimestamps extends DoFn<String, String> {
+    @ProcessElement
       public void processElement(ProcessContext c) {
       c.output(c.element() + ":" + c.timestamp().getMillis());
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
index b81eedb..604536b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
@@ -130,8 +130,8 @@ public class FlattenTest implements Serializable {
 
     PCollection<String> output = p
         .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
-        .apply(ParDo.withSideInputs(view).of(new OldDoFn<Void, String>() {
-                  @Override
+        .apply(ParDo.withSideInputs(view).of(new DoFn<Void, String>() {
+                  @ProcessElement
                   public void processElement(ProcessContext c) {
                     for (String side : c.sideInput(view)) {
                       c.output(side);
@@ -339,8 +339,8 @@ public class FlattenTest implements Serializable {
 
   /////////////////////////////////////////////////////////////////////////////
 
-  private static class IdentityFn<T> extends OldDoFn<T, T> {
-    @Override
+  private static class IdentityFn<T> extends DoFn<T, T> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(c.element());
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
index 15c3ba8..afe460f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
@@ -371,14 +371,14 @@ public class GroupByKeyTest {
     pipeline.run();
   }
 
-  private static class AssertTimestamp<K, V> extends OldDoFn<KV<K, V>, Void> {
+  private static class AssertTimestamp<K, V> extends DoFn<KV<K, V>, Void> {
     private final Instant timestamp;
 
     public AssertTimestamp(Instant timestamp) {
       this.timestamp = timestamp;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       assertThat(c.timestamp(), equalTo(timestamp));
     }
@@ -506,9 +506,9 @@ public class GroupByKeyTest {
    * Creates a KV that wraps the original KV together with a random key.
    */
   static class AssignRandomKey
-      extends OldDoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
+      extends DoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(KV.of(ThreadLocalRandom.current().nextLong(), c.element()));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
index d2ba452..e381470 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
@@ -65,9 +65,9 @@ public class WithTimestampsTest implements Serializable {
          .apply(WithTimestamps.of(timestampFn));
 
     PCollection<KV<String, Instant>> timestampedVals =
-        timestamped.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
-          @Override
-          public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
+        timestamped.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+          @ProcessElement
+          public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
               throws Exception {
             c.output(KV.of(c.element(), c.timestamp()));
           }
@@ -150,9 +150,9 @@ public class WithTimestampsTest implements Serializable {
              WithTimestamps.of(backInTimeFn).withAllowedTimestampSkew(skew.plus(100L)));
 
     PCollection<KV<String, Instant>> timestampedVals =
-        timestampedWithSkew.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
-          @Override
-          public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
+        timestampedWithSkew.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+          @ProcessElement
+          public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
               throws Exception {
             c.output(KV.of(c.element(), c.timestamp()));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
index c1848c6..e233114 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
@@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertThat;
 
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PBegin;
@@ -50,8 +50,8 @@ public class DisplayDataEvaluatorTest implements Serializable {
         new PTransform<PCollection<String>, POutput> () {
           @Override
           public PCollection<String> apply(PCollection<String> input) {
-            return input.apply(ParDo.of(new OldDoFn<String, String>() {
-              @Override
+            return input.apply(ParDo.of(new DoFn<String, String>() {
+              @ProcessElement
               public void processElement(ProcessContext c) throws Exception {
                 c.output(c.element());
               }
@@ -79,8 +79,8 @@ public class DisplayDataEvaluatorTest implements Serializable {
   @Test
   public void testPrimitiveTransform() {
     PTransform<? super PCollection<Integer>, ? super PCollection<Integer>> myTransform = ParDo.of(
-        new OldDoFn<Integer, Integer>() {
-      @Override
+        new DoFn<Integer, Integer>() {
+      @ProcessElement
       public void processElement(ProcessContext c) throws Exception {}
 
       @Override

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
index 517f968..e2f38b4 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
@@ -41,7 +41,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
@@ -1053,8 +1053,8 @@ public class DisplayDataTest implements Serializable {
   private static class IdentityTransform<T> extends PTransform<PCollection<T>, PCollection<T>> {
     @Override
     public PCollection<T> apply(PCollection<T> input) {
-      return input.apply(ParDo.of(new OldDoFn<T, T>() {
-        @Override
+      return input.apply(ParDo.of(new DoFn<T, T>() {
+        @ProcessElement
         public void processElement(ProcessContext c) throws Exception {
           c.output(c.element());
         }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
index 97667a3..c6f82ec 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
@@ -29,9 +29,8 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -84,10 +83,11 @@ public class CoGroupByKeyTest implements Serializable {
       input = p.apply("Create" + name, Create.timestamped(list, timestamps)
           .withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
     }
-    return input
-            .apply("Identity" + name, ParDo.of(new OldDoFn<KV<Integer, String>,
-                                                 KV<Integer, String>>() {
-              @Override
+    return input.apply(
+        "Identity" + name,
+        ParDo.of(
+            new DoFn<KV<Integer, String>, KV<Integer, String>>() {
+              @ProcessElement
               public void processElement(ProcessContext c) {
                 c.output(c.element());
               }
@@ -313,11 +313,11 @@ public class CoGroupByKeyTest implements Serializable {
   }
 
   /**
-   * A OldDoFn used in testCoGroupByKeyWithWindowing(), to test processing the
-   * results of a CoGroupByKey.
+   * A DoFn used in testCoGroupByKeyWithWindowing(), to test processing the results of a
+   * CoGroupByKey.
    */
-  private static class ClickOfPurchaseFn extends
-      OldDoFn<KV<Integer, CoGbkResult>, KV<String, String>> implements RequiresWindowAccess {
+  private static class ClickOfPurchaseFn
+      extends DoFn<KV<Integer, CoGbkResult>, KV<String, String>> {
     private final TupleTag<String> clicksTag;
 
     private final TupleTag<String> purchasesTag;
@@ -329,9 +329,9 @@ public class CoGroupByKeyTest implements Serializable {
       this.purchasesTag = purchasesTag;
     }
 
-    @Override
-    public void processElement(ProcessContext c) {
-      BoundedWindow w = c.window();
+    @ProcessElement
+    public void processElement(ProcessContext c, BoundedWindow window) {
+      BoundedWindow w = window;
       KV<Integer, CoGbkResult> e = c.element();
       CoGbkResult row = e.getValue();
       Iterable<String> clicks = row.getAll(clicksTag);
@@ -347,11 +347,11 @@ public class CoGroupByKeyTest implements Serializable {
 
 
   /**
-   * A OldDoFn used in testCoGroupByKeyHandleResults(), to test processing the
+   * A DoFn used in testCoGroupByKeyHandleResults(), to test processing the
    * results of a CoGroupByKey.
    */
   private static class CorrelatePurchaseCountForAddressesWithoutNamesFn extends
-      OldDoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
+      DoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
     private final TupleTag<String> purchasesTag;
 
     private final TupleTag<String> addressesTag;
@@ -367,7 +367,7 @@ public class CoGroupByKeyTest implements Serializable {
       this.namesTag = namesTag;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       KV<Integer, CoGbkResult> e = c.element();
       CoGbkResult row = e.getValue();
@@ -401,7 +401,7 @@ public class CoGroupByKeyTest implements Serializable {
   }
 
   /**
-   * Tests that the consuming OldDoFn
+   * Tests that the consuming DoFn
    * (CorrelatePurchaseCountForAddressesWithoutNamesFn) performs as expected.
    */
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
index 27d2539..c583860 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.util.WindowingStrategy;
@@ -199,8 +199,8 @@ public class WindowTest implements Serializable {
         .apply(GroupByKey.<Integer, String>create())
         .apply(
             ParDo.of(
-                new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
-                  @Override
+                new DoFn<KV<Integer, Iterable<String>>, Void>() {
+                  @ProcessElement
                   public void processElement(ProcessContext c) throws Exception {
                     assertThat(
                         c.timestamp(),
@@ -231,8 +231,8 @@ public class WindowTest implements Serializable {
         .apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10)))
             .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow()))
         .apply(GroupByKey.<Integer, String>create())
-        .apply(ParDo.of(new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
-          @Override
+        .apply(ParDo.of(new DoFn<KV<Integer, Iterable<String>>, Void>() {
+          @ProcessElement
           public void processElement(ProcessContext c) throws Exception {
             assertThat(c.timestamp(), equalTo(new Instant(10 * 60 * 1000 - 1)));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
index 622a277..159e700 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
@@ -26,9 +26,8 @@ import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -58,12 +57,14 @@ public class WindowingTest implements Serializable {
 
   private static class WindowedCount extends PTransform<PCollection<String>, PCollection<String>> {
 
-    private final class FormatCountsDoFn
-        extends OldDoFn<KV<String, Long>, String> implements RequiresWindowAccess {
-      @Override
-          public void processElement(ProcessContext c) {
-        c.output(c.element().getKey() + ":" + c.element().getValue()
-            + ":" + c.timestamp().getMillis() + ":" + c.window());
+    private final class FormatCountsDoFn extends DoFn<KV<String, Long>, String> {
+      @ProcessElement
+      public void processElement(ProcessContext c, BoundedWindow window) {
+        c.output(
+            c.element().getKey()
+                + ":" + c.element().getValue()
+                + ":" + c.timestamp().getMillis()
+                + ":" + window);
       }
     }
     private WindowFn<? super String, ?> windowFn;
@@ -234,9 +235,9 @@ public class WindowingTest implements Serializable {
     p.run();
   }
 
-  /** A OldDoFn that tokenizes lines of text into individual words. */
-  static class ExtractWordsWithTimestampsFn extends OldDoFn<String, String> {
-    @Override
+  /** A DoFn that tokenizes lines of text into individual words. */
+  static class ExtractWordsWithTimestampsFn extends DoFn<String, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       String[] words = c.element().split("[^a-zA-Z0-9']+");
       if (words.length == 2) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
index 547c778..13218b2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollection.IsBounded;
@@ -75,8 +75,8 @@ public final class PCollectionTupleTest implements Serializable {
         .apply(Create.of(inputs));
 
     PCollectionTuple outputs = mainInput.apply(ParDo
-        .of(new OldDoFn<Integer, Integer>() {
-          @Override
+        .of(new DoFn<Integer, Integer>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             c.sideOutput(sideOutputTag, c.element());
           }})

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/1959ddbe/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
index c525cf1..287223f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import org.junit.Rule;
@@ -44,9 +44,9 @@ public class TypedPValueTest {
   @Rule
   public ExpectedException thrown = ExpectedException.none();
 
-  private static class IdentityDoFn extends OldDoFn<Integer, Integer> {
+  private static class IdentityDoFn extends DoFn<Integer, Integer> {
     private static final long serialVersionUID = 0;
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element());
     }
@@ -129,9 +129,9 @@ public class TypedPValueTest {
   static class EmptyClass {
   }
 
-  private static class EmptyClassDoFn extends OldDoFn<Integer, EmptyClass> {
+  private static class EmptyClassDoFn extends DoFn<Integer, EmptyClass> {
     private static final long serialVersionUID = 0;
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(new EmptyClass());
     }



[06/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
index 89243a3..a4af1b0 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.functions;
 
 import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollectionView;
@@ -30,13 +30,13 @@ import org.apache.flink.util.Collector;
 import java.util.Map;
 
 /**
- * Encapsulates a {@link org.apache.beam.sdk.transforms.DoFn}
+ * Encapsulates a {@link OldDoFn}
  * inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}.
  */
 public class FlinkDoFnFunction<InputT, OutputT>
     extends RichMapPartitionFunction<WindowedValue<InputT>, WindowedValue<OutputT>> {
 
-  private final DoFn<InputT, OutputT> doFn;
+  private final OldDoFn<InputT, OutputT> doFn;
   private final SerializedPipelineOptions serializedOptions;
 
   private final Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs;
@@ -47,7 +47,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
   private final WindowingStrategy<?, ?> windowingStrategy;
 
   public FlinkDoFnFunction(
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
       PipelineOptions options) {
@@ -56,7 +56,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
     this.serializedOptions = new SerializedPipelineOptions(options);
     this.windowingStrategy = windowingStrategy;
 
-    this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+    this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
     this.hasSideInputs = !sideInputs.isEmpty();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
index 9074d72..2d36043 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
 import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
@@ -60,7 +60,7 @@ public class FlinkMergingNonShuffleReduceFunction<
 
   private final CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, OutputT> combineFn;
 
-  private final DoFn<KV<K, InputT>, KV<K, OutputT>> doFn;
+  private final OldDoFn<KV<K, InputT>, KV<K, OutputT>> doFn;
 
   private final WindowingStrategy<?, W> windowingStrategy;
 
@@ -81,8 +81,8 @@ public class FlinkMergingNonShuffleReduceFunction<
 
     this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
 
-    // dummy DoFn because we need one for ProcessContext
-    this.doFn = new DoFn<KV<K, InputT>, KV<K, OutputT>>() {
+    // dummy OldDoFn because we need one for ProcessContext
+    this.doFn = new OldDoFn<KV<K, InputT>, KV<K, OutputT>>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
index f92e76f..6e673fc 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.functions;
 
 import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.join.RawUnionValue;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingStrategy;
@@ -32,7 +32,7 @@ import org.apache.flink.util.Collector;
 import java.util.Map;
 
 /**
- * Encapsulates a {@link org.apache.beam.sdk.transforms.DoFn} that uses side outputs
+ * Encapsulates a {@link OldDoFn} that uses side outputs
  * inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}.
  *
  * We get a mapping from {@link org.apache.beam.sdk.values.TupleTag} to output index
@@ -42,7 +42,7 @@ import java.util.Map;
 public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
     extends RichMapPartitionFunction<WindowedValue<InputT>, WindowedValue<RawUnionValue>> {
 
-  private final DoFn<InputT, OutputT> doFn;
+  private final OldDoFn<InputT, OutputT> doFn;
   private final SerializedPipelineOptions serializedOptions;
 
   private final Map<TupleTag<?>, Integer> outputMap;
@@ -55,7 +55,7 @@ public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
   private final WindowingStrategy<?, ?> windowingStrategy;
 
   public FlinkMultiOutputDoFnFunction(
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
       PipelineOptions options,
@@ -64,7 +64,7 @@ public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
     this.serializedOptions = new SerializedPipelineOptions(options);
     this.outputMap = outputMap;
 
-    this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+    this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
     this.hasSideInputs = !sideInputs.isEmpty();
     this.windowingStrategy = windowingStrategy;
     this.sideInputs = sideInputs;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
index 71b6d27..fab3c85 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
@@ -18,7 +18,7 @@
 package org.apache.beam.runners.flink.translation.functions;
 
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.join.RawUnionValue;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -35,7 +35,7 @@ import java.util.Collection;
 import java.util.Map;
 
 /**
- * {@link DoFn.ProcessContext} for {@link FlinkMultiOutputDoFnFunction} that supports
+ * {@link OldDoFn.ProcessContext} for {@link FlinkMultiOutputDoFnFunction} that supports
  * side outputs.
  */
 class FlinkMultiOutputProcessContext<InputT, OutputT>
@@ -50,7 +50,7 @@ class FlinkMultiOutputProcessContext<InputT, OutputT>
   FlinkMultiOutputProcessContext(
       PipelineOptions pipelineOptions,
       RuntimeContext runtimeContext,
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       WindowingStrategy<?, ?> windowingStrategy,
       Collector<WindowedValue<RawUnionValue>> collector,
       Map<TupleTag<?>, Integer> outputMap,

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
index d49821b..98446f9 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
@@ -17,18 +17,16 @@
  */
 package org.apache.beam.runners.flink.translation.functions;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
 
 import org.joda.time.Instant;
 
-import java.util.Collection;
-
 /**
  * {@link WindowFn.AssignContext} for calling a {@link WindowFn} for elements emitted from
- * {@link org.apache.beam.sdk.transforms.DoFn#startBundle(DoFn.Context)}
- * or {@link DoFn#finishBundle(DoFn.Context)}.
+ * {@link OldDoFn#startBundle(OldDoFn.Context)}
+ * or {@link OldDoFn#finishBundle(OldDoFn.Context)}.
  *
  * <p>In those cases the {@code WindowFn} is not allowed to access any element information.
  */

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
index c29e1df..2db4b7b 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
 import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -58,7 +58,7 @@ public class FlinkPartialReduceFunction<K, InputT, AccumT, W extends BoundedWind
 
   protected final CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, ?> combineFn;
 
-  protected final DoFn<KV<K, InputT>, KV<K, AccumT>> doFn;
+  protected final OldDoFn<KV<K, InputT>, KV<K, AccumT>> doFn;
 
   protected final WindowingStrategy<?, W> windowingStrategy;
 
@@ -77,8 +77,8 @@ public class FlinkPartialReduceFunction<K, InputT, AccumT, W extends BoundedWind
     this.sideInputs = sideInputs;
     this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
 
-    // dummy DoFn because we need one for ProcessContext
-    this.doFn = new DoFn<KV<K, InputT>, KV<K, AccumT>>() {
+    // dummy OldDoFn because we need one for ProcessContext
+    this.doFn = new OldDoFn<KV<K, InputT>, KV<K, AccumT>>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
index 235a803..3954d1f 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.TimerInternals;
@@ -48,10 +48,10 @@ import java.util.Iterator;
 import java.util.Map;
 
 /**
- * {@link org.apache.beam.sdk.transforms.DoFn.ProcessContext} for our Flink Wrappers.
+ * {@link OldDoFn.ProcessContext} for our Flink Wrappers.
  */
 class FlinkProcessContext<InputT, OutputT>
-    extends DoFn<InputT, OutputT>.ProcessContext {
+    extends OldDoFn<InputT, OutputT>.ProcessContext {
 
   private final PipelineOptions pipelineOptions;
   private final RuntimeContext runtimeContext;
@@ -67,7 +67,7 @@ class FlinkProcessContext<InputT, OutputT>
   FlinkProcessContext(
       PipelineOptions pipelineOptions,
       RuntimeContext runtimeContext,
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       WindowingStrategy<?, ?> windowingStrategy,
       Collector<WindowedValue<OutputT>> collector,
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs) {
@@ -80,7 +80,7 @@ class FlinkProcessContext<InputT, OutputT>
     this.pipelineOptions = pipelineOptions;
     this.runtimeContext = runtimeContext;
     this.collector = collector;
-    this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+    this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
     this.windowingStrategy = windowingStrategy;
     this.sideInputs = sideInputs;
 
@@ -90,7 +90,7 @@ class FlinkProcessContext<InputT, OutputT>
   FlinkProcessContext(
       PipelineOptions pipelineOptions,
       RuntimeContext runtimeContext,
-      DoFn<InputT, OutputT> doFn,
+      OldDoFn<InputT, OutputT> doFn,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs) {
     doFn.super();
@@ -101,7 +101,7 @@ class FlinkProcessContext<InputT, OutputT>
     this.pipelineOptions = pipelineOptions;
     this.runtimeContext = runtimeContext;
     this.collector = null;
-    this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+    this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
     this.windowingStrategy = windowingStrategy;
     this.sideInputs = sideInputs;
 
@@ -141,7 +141,7 @@ class FlinkProcessContext<InputT, OutputT>
   public BoundedWindow window() {
     if (!requiresWindowAccess) {
       throw new UnsupportedOperationException(
-          "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+          "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
     }
     return Iterables.getOnlyElement(windowedValue.getWindows());
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
index 9cbc6b9..b1729a4 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
 import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -60,7 +60,7 @@ public class FlinkReduceFunction<K, AccumT, OutputT, W extends BoundedWindow>
 
   protected final CombineFnBase.PerKeyCombineFn<K, ?, AccumT, OutputT> combineFn;
 
-  protected final DoFn<KV<K, AccumT>, KV<K, OutputT>> doFn;
+  protected final OldDoFn<KV<K, AccumT>, KV<K, OutputT>> doFn;
 
   protected final WindowingStrategy<?, W> windowingStrategy;
 
@@ -81,8 +81,8 @@ public class FlinkReduceFunction<K, AccumT, OutputT, W extends BoundedWindow>
 
     this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
 
-    // dummy DoFn because we need one for ProcessContext
-    this.doFn = new DoFn<KV<K, AccumT>, KV<K, OutputT>>() {
+    // dummy OldDoFn because we need one for ProcessContext
+    this.doFn = new OldDoFn<KV<K, AccumT>, KV<K, OutputT>>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
index e40d6e3..74ec66a 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
@@ -24,7 +24,7 @@ import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregat
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
@@ -52,13 +52,13 @@ import java.util.Collection;
  * */
 public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFlatMapFunction<WindowedValue<IN>, WindowedValue<OUTFL>> {
 
-  private final DoFn<IN, OUTDF> doFn;
+  private final OldDoFn<IN, OUTDF> doFn;
   private final WindowingStrategy<?, ?> windowingStrategy;
   private final SerializedPipelineOptions serializedPipelineOptions;
 
   private DoFnProcessContext context;
 
-  public FlinkAbstractParDoWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUTDF> doFn) {
+  public FlinkAbstractParDoWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUTDF> doFn) {
     checkNotNull(options);
     checkNotNull(windowingStrategy);
     checkNotNull(doFn);
@@ -104,15 +104,15 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
     doFn.processElement(this.context);
   }
 
-  private class DoFnProcessContext extends DoFn<IN, OUTDF>.ProcessContext {
+  private class DoFnProcessContext extends OldDoFn<IN, OUTDF>.ProcessContext {
 
-    private final DoFn<IN, OUTDF> fn;
+    private final OldDoFn<IN, OUTDF> fn;
 
     protected final Collector<WindowedValue<OUTFL>> collector;
 
     private WindowedValue<IN> element;
 
-    private DoFnProcessContext(DoFn<IN, OUTDF> function,
+    private DoFnProcessContext(OldDoFn<IN, OUTDF> function,
           Collector<WindowedValue<OUTFL>> outCollector) {
       function.super();
       super.setupDelegateAggregators();
@@ -137,9 +137,9 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
 
     @Override
     public BoundedWindow window() {
-      if (!(fn instanceof DoFn.RequiresWindowAccess)) {
+      if (!(fn instanceof OldDoFn.RequiresWindowAccess)) {
         throw new UnsupportedOperationException(
-            "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+            "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
       }
 
       Collection<? extends BoundedWindow> windows = this.element.getWindows();
@@ -211,7 +211,7 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
       throw new IllegalArgumentException(String.format(
           "Cannot output with timestamp %s. Output timestamps must be no earlier than the "
               + "timestamp of the current input (%s) minus the allowed skew (%s). See the "
-              + "DoFn#getAllowedTimestmapSkew() Javadoc for details on changing the allowed skew.",
+              + "OldDoFn#getAllowedTimestmapSkew() Javadoc for details on changing the allowed skew.",
           timestamp, ref.getTimestamp(),
           PeriodFormat.getDefault().print(doFn.getAllowedTimestampSkew().toPeriod())));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
index 0e977db..103a12b 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
@@ -36,7 +36,7 @@ import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -112,7 +112,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
 
   private transient CoderRegistry coderRegistry;
 
-  private DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> operator;
+  private OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> operator;
 
   private ProcessContext context;
 
@@ -263,7 +263,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
    * a function with that combiner is created, so that elements are combined as they arrive. This is
    * done for speed and (in most of the cases) for reduction of the per-window state.
    */
-  private <W extends BoundedWindow> DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> createGroupAlsoByWindowOperator() {
+  private <W extends BoundedWindow> OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> createGroupAlsoByWindowOperator() {
     if (this.operator == null) {
 
       StateInternalsFactory<K> stateInternalsFactory = new GroupAlsoByWindowWrapperStateInternalsFactory();
@@ -272,7 +272,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
         // Thus VOUT == Iterable<VIN>
         Coder<VIN> inputValueCoder = inputKvCoder.getValueCoder();
 
-        this.operator = (DoFn) GroupAlsoByWindowViaWindowSetDoFn.create(
+        this.operator = (OldDoFn) GroupAlsoByWindowViaWindowSetDoFn.create(
             (WindowingStrategy<?, W>) this.windowingStrategy, stateInternalsFactory, SystemReduceFn.<K, VIN, W>buffering(inputValueCoder));
       } else {
         Coder<K> inputKeyCoder = inputKvCoder.getKeyCoder();
@@ -446,7 +446,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
 
     private KeyedWorkItem<K, VIN> element;
 
-    public ProcessContext(DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> function,
+    public ProcessContext(OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> function,
                           TimestampedCollector<WindowedValue<KV<K, VOUT>>> outCollector,
                           FlinkTimerInternals timerInternals) {
       function.super();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
index 619b887..0ea0cab 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.join.RawUnionValue;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingInternals;
@@ -40,7 +40,7 @@ public class FlinkParDoBoundMultiWrapper<IN, OUT> extends FlinkAbstractParDoWrap
   private final TupleTag<?> mainTag;
   private final Map<TupleTag<?>, Integer> outputLabels;
 
-  public FlinkParDoBoundMultiWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUT> doFn, TupleTag<?> mainTag, Map<TupleTag<?>, Integer> tagsToLabels) {
+  public FlinkParDoBoundMultiWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUT> doFn, TupleTag<?> mainTag, Map<TupleTag<?>, Integer> tagsToLabels) {
     super(options, windowingStrategy, doFn);
     this.mainTag = checkNotNull(mainTag);
     this.outputLabels = checkNotNull(tagsToLabels);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
index 4def0c6..6be94b2 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming;
 
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.TimerInternals;
@@ -41,7 +41,7 @@ import java.util.Collection;
  * */
 public class FlinkParDoBoundWrapper<IN, OUT> extends FlinkAbstractParDoWrapper<IN, OUT, OUT> {
 
-  public FlinkParDoBoundWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUT> doFn) {
+  public FlinkParDoBoundWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUT> doFn) {
     super(options, windowingStrategy, doFn);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
index 9e55002..a0b33f8 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming.state;
 
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.TimerInternals;
 
@@ -106,7 +106,7 @@ public abstract class AbstractFlinkTimerInternals<K, VIN> implements TimerIntern
     }
   }
 
-  public void encodeTimerInternals(DoFn.ProcessContext context,
+  public void encodeTimerInternals(OldDoFn.ProcessContext context,
                                    StateCheckpointWriter writer,
                                    KvCoder<K, VIN> kvCoder,
                                    Coder<? extends BoundedWindow> windowCoder) throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
index 61e219c..c24d91d 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -114,7 +114,7 @@ public class PipelineOptionsTest {
   }
 
 
-  private static class TestDoFn extends DoFn<Object, Object> {
+  private static class TestDoFn extends OldDoFn<Object, Object> {
 
     @Override
     public void processElement(ProcessContext c) throws Exception {
@@ -126,7 +126,7 @@ public class PipelineOptionsTest {
   }
 
   private static class TestParDoWrapper extends FlinkAbstractParDoWrapper {
-    public TestParDoWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, DoFn doFn) {
+    public TestParDoWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, OldDoFn doFn) {
       super(options, windowingStrategy, doFn);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
index bb79b27..ca70096 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.CountingInput;
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PCollection;
 
@@ -72,7 +72,7 @@ public class ReadSourceITCase extends JavaProgramTestBase {
 
     PCollection<String> result = p
         .apply(CountingInput.upTo(10))
-        .apply(ParDo.of(new DoFn<Long, String>() {
+        .apply(ParDo.of(new OldDoFn<Long, String>() {
           @Override
           public void processElement(ProcessContext c) throws Exception {
             c.output(c.element().toString());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
index fe71802..bc69f34 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.CountingInput;
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import com.google.common.base.Joiner;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
@@ -59,7 +59,7 @@ public class ReadSourceStreamingITCase extends StreamingProgramTestBase {
 
     p
       .apply(CountingInput.upTo(10))
-      .apply(ParDo.of(new DoFn<Long, String>() {
+      .apply(ParDo.of(new OldDoFn<Long, String>() {
           @Override
           public void processElement(ProcessContext c) throws Exception {
             c.output(c.element().toString());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
index 1b55c61..ca183a8 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.flink.FlinkTestPipeline;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -61,7 +61,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
     compareResultsByLinesInMemory(Joiner.on('\n').join(EXPECTED_RESULT), resultPath);
   }
 
-  public static class ExtractUserAndTimestamp extends DoFn<KV<Integer, String>, String> {
+  public static class ExtractUserAndTimestamp extends OldDoFn<KV<Integer, String>, String> {
     private static final long serialVersionUID = 0;
 
     @Override
@@ -97,7 +97,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
               .withAllowedLateness(Duration.ZERO)
               .discardingFiredPanes())
 
-          .apply(ParDo.of(new DoFn<String, KV<Void, String>>() {
+          .apply(ParDo.of(new OldDoFn<String, KV<Void, String>>() {
             @Override
             public void processElement(ProcessContext c) throws Exception {
               String elem = c.element();
@@ -105,7 +105,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
             }
           }))
           .apply(GroupByKey.<Void, String>create())
-          .apply(ParDo.of(new DoFn<KV<Void, Iterable<String>>, String>() {
+          .apply(ParDo.of(new OldDoFn<KV<Void, Iterable<String>>, String>() {
             @Override
             public void processElement(ProcessContext c) throws Exception {
               KV<Void, Iterable<String>> elem = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
index 1efb42f..7912aee 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.Sessions;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,7 +103,7 @@ public class TopWikipediaSessionsITCase extends StreamingProgramTestBase impleme
 
 
 
-      .apply(ParDo.of(new DoFn<TableRow, String>() {
+      .apply(ParDo.of(new OldDoFn<TableRow, String>() {
         @Override
         public void processElement(ProcessContext c) throws Exception {
           TableRow row = c.element();
@@ -120,7 +120,7 @@ public class TopWikipediaSessionsITCase extends StreamingProgramTestBase impleme
 
       .apply(Count.<String>perElement());
 
-    PCollection<String> format = output.apply(ParDo.of(new DoFn<KV<String, Long>, String>() {
+    PCollection<String> format = output.apply(ParDo.of(new OldDoFn<KV<String, Long>, String>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         KV<String, Long> el = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
index 7fd203f..ac06b52 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
@@ -47,9 +47,9 @@ import org.apache.beam.sdk.options.StreamingOptions;
 import org.apache.beam.sdk.runners.TransformTreeNode;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
@@ -82,7 +82,6 @@ import com.google.api.services.dataflow.model.WorkerPool;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -94,7 +93,6 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-
 import javax.annotation.Nullable;
 
 /**
@@ -1021,7 +1019,7 @@ public class DataflowPipelineTranslator {
   }
 
   private static void translateFn(
-      DoFn fn,
+      OldDoFn fn,
       WindowingStrategy windowingStrategy,
       Iterable<PCollectionView<?>> sideInputs,
       Coder inputCoder,

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index e7cc20e..d762d50 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -78,9 +78,9 @@ import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -143,7 +143,6 @@ import com.google.common.collect.Multimap;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.joda.time.DateTimeUtils;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -173,7 +172,6 @@ import java.util.Random;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-
 import javax.annotation.Nullable;
 
 /**
@@ -762,13 +760,14 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
       PTransform<PCollection<T>, PCollection<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>>> {
 
     /**
-     * A {@link DoFn} that for each element outputs a {@code KV} structure suitable for
+     * A {@link OldDoFn} that for each element outputs a {@code KV} structure suitable for
      * grouping by the hash of the window's byte representation and sorting the grouped values
      * using the window's byte representation.
      */
     @SystemDoFnInternal
     private static class UseWindowHashAsKeyAndWindowAsSortKeyDoFn<T, W extends BoundedWindow>
-        extends DoFn<T, KV<Integer, KV<W, WindowedValue<T>>>> implements DoFn.RequiresWindowAccess {
+        extends OldDoFn<T, KV<Integer, KV<W, WindowedValue<T>>>> implements
+        OldDoFn.RequiresWindowAccess {
 
       private final IsmRecordCoder<?> ismCoderForHash;
       private UseWindowHashAsKeyAndWindowAsSortKeyDoFn(IsmRecordCoder<?> ismCoderForHash) {
@@ -828,15 +827,15 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
       extends PTransform<PCollection<T>, PCollectionView<T>> {
 
     /**
-     * A {@link DoFn} that outputs {@link IsmRecord}s. These records are structured as follows:
+     * A {@link OldDoFn} that outputs {@link IsmRecord}s. These records are structured as follows:
      * <ul>
      *   <li>Key 1: Window
      *   <li>Value: Windowed value
      * </ul>
      */
     static class IsmRecordForSingularValuePerWindowDoFn<T, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
-                     IsmRecord<WindowedValue<T>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+                             IsmRecord<WindowedValue<T>>> {
 
       private final Coder<W> windowCoder;
       IsmRecordForSingularValuePerWindowDoFn(Coder<W> windowCoder) {
@@ -902,8 +901,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
         applyForSingleton(
             DataflowRunner runner,
             PCollection<T> input,
-            DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
-                 IsmRecord<WindowedValue<FinalT>>> doFn,
+            OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+                             IsmRecord<WindowedValue<FinalT>>> doFn,
             boolean hasDefault,
             FinalT defaultValue,
             Coder<FinalT> defaultValueCoder) {
@@ -998,7 +997,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
   static class BatchViewAsList<T>
       extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
     /**
-     * A {@link DoFn} which creates {@link IsmRecord}s assuming that each element is within the
+     * A {@link OldDoFn} which creates {@link IsmRecord}s assuming that each element is within the
      * global window. Each {@link IsmRecord} has
      * <ul>
      *   <li>Key 1: Global window</li>
@@ -1008,7 +1007,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
      */
     @SystemDoFnInternal
     static class ToIsmRecordForGlobalWindowDoFn<T>
-        extends DoFn<T, IsmRecord<WindowedValue<T>>> {
+        extends OldDoFn<T, IsmRecord<WindowedValue<T>>> {
 
       long indexInBundle;
       @Override
@@ -1030,7 +1029,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     /**
-     * A {@link DoFn} which creates {@link IsmRecord}s comparing successive elements windows
+     * A {@link OldDoFn} which creates {@link IsmRecord}s comparing successive elements windows
      * to locate the window boundaries. The {@link IsmRecord} has:
      * <ul>
      *   <li>Key 1: Window</li>
@@ -1040,8 +1039,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
      */
     @SystemDoFnInternal
     static class ToIsmRecordForNonGlobalWindowDoFn<T, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
-                     IsmRecord<WindowedValue<T>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+                             IsmRecord<WindowedValue<T>>> {
 
       private final Coder<W> windowCoder;
       ToIsmRecordForNonGlobalWindowDoFn(Coder<W> windowCoder) {
@@ -1174,7 +1173,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
       extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
 
     /**
-     * A {@link DoFn} which groups elements by window boundaries. For each group,
+     * A {@link OldDoFn} which groups elements by window boundaries. For each group,
      * the group of elements is transformed into a {@link TransformedMap}.
      * The transformed {@code Map<K, V>} is backed by a {@code Map<K, WindowedValue<V>>}
      * and contains a function {@code WindowedValue<V> -> V}.
@@ -1188,10 +1187,10 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
      * </ul>
      */
     static class ToMapDoFn<K, V, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
-                     IsmRecord<WindowedValue<TransformedMap<K,
-                                             WindowedValue<V>,
-                                             V>>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
+                             IsmRecord<WindowedValue<TransformedMap<K,
+                                                     WindowedValue<V>,
+                                                     V>>>> {
 
       private final Coder<W> windowCoder;
       ToMapDoFn(Coder<W> windowCoder) {
@@ -1358,8 +1357,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
 
       @SystemDoFnInternal
       private static class GroupByKeyHashAndSortByKeyAndWindowDoFn<K, V, W>
-          extends DoFn<KV<K, V>, KV<Integer, KV<KV<K, W>, WindowedValue<V>>>>
-          implements DoFn.RequiresWindowAccess {
+          extends OldDoFn<KV<K, V>, KV<Integer, KV<KV<K, W>, WindowedValue<V>>>>
+          implements OldDoFn.RequiresWindowAccess {
 
         private final IsmRecordCoder<?> coder;
         private GroupByKeyHashAndSortByKeyAndWindowDoFn(IsmRecordCoder<?> coder) {
@@ -1412,7 +1411,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     /**
-     * A {@link DoFn} which creates {@link IsmRecord}s comparing successive elements windows
+     * A {@link OldDoFn} which creates {@link IsmRecord}s comparing successive elements windows
      * and keys to locate window and key boundaries. The main output {@link IsmRecord}s have:
      * <ul>
      *   <li>Key 1: Window</li>
@@ -1424,12 +1423,12 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
      * <p>Additionally, we output all the unique keys per window seen to {@code outputForEntrySet}
      * and the unique key count per window to {@code outputForSize}.
      *
-     * <p>Finally, if this DoFn has been requested to perform unique key checking, it will
+     * <p>Finally, if this OldDoFn has been requested to perform unique key checking, it will
      * throw an {@link IllegalStateException} if more than one key per window is found.
      */
     static class ToIsmRecordForMapLikeDoFn<K, V, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<KV<K, W>, WindowedValue<V>>>>,
-                     IsmRecord<WindowedValue<V>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<KV<K, W>, WindowedValue<V>>>>,
+                             IsmRecord<WindowedValue<V>>> {
 
       private final TupleTag<KV<Integer, KV<W, Long>>> outputForSize;
       private final TupleTag<KV<Integer, KV<W, K>>> outputForEntrySet;
@@ -1557,7 +1556,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     /**
-     * A {@link DoFn} which outputs a metadata {@link IsmRecord} per window of:
+     * A {@link OldDoFn} which outputs a metadata {@link IsmRecord} per window of:
        * <ul>
        *   <li>Key 1: META key</li>
        *   <li>Key 2: window</li>
@@ -1565,11 +1564,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
        *   <li>Value: sum of values for window</li>
        * </ul>
        *
-       * <p>This {@link DoFn} is meant to be used to compute the number of unique keys
+       * <p>This {@link OldDoFn} is meant to be used to compute the number of unique keys
        * per window for map and multimap side inputs.
        */
     static class ToIsmMetadataRecordForSizeDoFn<K, V, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, Long>>>, IsmRecord<WindowedValue<V>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, Long>>>, IsmRecord<WindowedValue<V>>> {
       private final Coder<W> windowCoder;
       ToIsmMetadataRecordForSizeDoFn(Coder<W> windowCoder) {
         this.windowCoder = windowCoder;
@@ -1606,7 +1605,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     /**
-     * A {@link DoFn} which outputs a metadata {@link IsmRecord} per window and key pair of:
+     * A {@link OldDoFn} which outputs a metadata {@link IsmRecord} per window and key pair of:
        * <ul>
        *   <li>Key 1: META key</li>
        *   <li>Key 2: window</li>
@@ -1614,11 +1613,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
        *   <li>Value: key</li>
        * </ul>
        *
-       * <p>This {@link DoFn} is meant to be used to output index to key records
+       * <p>This {@link OldDoFn} is meant to be used to output index to key records
        * per window for map and multimap side inputs.
        */
     static class ToIsmMetadataRecordForKeyDoFn<K, V, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, K>>>, IsmRecord<WindowedValue<V>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, K>>>, IsmRecord<WindowedValue<V>>> {
 
       private final Coder<K> keyCoder;
       private final Coder<W> windowCoder;
@@ -1658,7 +1657,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     /**
-     * A {@link DoFn} which partitions sets of elements by window boundaries. Within each
+     * A {@link OldDoFn} which partitions sets of elements by window boundaries. Within each
      * partition, the set of elements is transformed into a {@link TransformedMap}.
      * The transformed {@code Map<K, Iterable<V>>} is backed by a
      * {@code Map<K, Iterable<WindowedValue<V>>>} and contains a function
@@ -1673,10 +1672,10 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
      * </ul>
      */
     static class ToMultimapDoFn<K, V, W extends BoundedWindow>
-        extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
-                     IsmRecord<WindowedValue<TransformedMap<K,
-                                                            Iterable<WindowedValue<V>>,
-                                                            Iterable<V>>>>> {
+        extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
+                             IsmRecord<WindowedValue<TransformedMap<K,
+                                                                    Iterable<WindowedValue<V>>,
+                                                                    Iterable<V>>>>> {
 
       private final Coder<W> windowCoder;
       ToMultimapDoFn(Coder<W> windowCoder) {
@@ -2335,7 +2334,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
           // WindmillSink.
           .apply(Reshuffle.<Integer, ValueWithRecordId<T>>of())
           .apply("StripIds", ParDo.of(
-              new DoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
+              new OldDoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   c.output(c.element().getValue().getValue());
@@ -2372,11 +2371,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
   }
 
   /**
-   * A specialized {@link DoFn} for writing the contents of a {@link PCollection}
+   * A specialized {@link OldDoFn} for writing the contents of a {@link PCollection}
    * to a streaming {@link PCollectionView} backend implementation.
    */
   private static class StreamingPCollectionViewWriterFn<T>
-  extends DoFn<Iterable<T>, T> implements DoFn.RequiresWindowAccess {
+  extends OldDoFn<Iterable<T>, T> implements OldDoFn.RequiresWindowAccess {
     private final PCollectionView<?> view;
     private final Coder<T> dataCoder;
 
@@ -2553,7 +2552,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
   }
 
-  private static class WrapAsList<T> extends DoFn<T, List<T>> {
+  private static class WrapAsList<T> extends OldDoFn<T, List<T>> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(Arrays.asList(c.element()));
@@ -2716,7 +2715,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     @Nullable
     private PTransform<?, ?> transform;
     @Nullable
-    private DoFn<?, ?> doFn;
+    private OldDoFn<?, ?> doFn;
 
     /**
      * Builds an instance of this class from the overridden transform.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
index 5f808a5..d4f9a90 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
@@ -18,7 +18,7 @@
 package org.apache.beam.runners.dataflow.internal;
 
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -63,9 +63,9 @@ public class AssignWindows<T> extends PTransform<PCollection<T>, PCollection<T>>
     } else {
       // If the windowFn didn't change, we just run a pass-through transform and then set the
       // new windowing strategy.
-      return input.apply("Identity", ParDo.of(new DoFn<T, T>() {
+      return input.apply("Identity", ParDo.of(new OldDoFn<T, T>() {
         @Override
-        public void processElement(DoFn<T, T>.ProcessContext c) throws Exception {
+        public void processElement(OldDoFn<T, T>.ProcessContext c) throws Exception {
           c.output(c.element());
         }
       })).setWindowingStrategyInternal(outputStrategy);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
index f83acbc..2017313 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
@@ -18,32 +18,32 @@
 package org.apache.beam.runners.dataflow.util;
 
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollectionView;
 
 import java.io.Serializable;
 
 /**
- * Wrapper class holding the necessary information to serialize a DoFn.
+ * Wrapper class holding the necessary information to serialize a OldDoFn.
  *
- * @param <InputT> the type of the (main) input elements of the DoFn
- * @param <OutputT> the type of the (main) output elements of the DoFn
+ * @param <InputT> the type of the (main) input elements of the OldDoFn
+ * @param <OutputT> the type of the (main) output elements of the OldDoFn
  */
 public class DoFnInfo<InputT, OutputT> implements Serializable {
-  private final DoFn<InputT, OutputT> doFn;
+  private final OldDoFn<InputT, OutputT> doFn;
   private final WindowingStrategy<?, ?> windowingStrategy;
   private final Iterable<PCollectionView<?>> sideInputViews;
   private final Coder<InputT> inputCoder;
 
-  public DoFnInfo(DoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy) {
+  public DoFnInfo(OldDoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy) {
     this.doFn = doFn;
     this.windowingStrategy = windowingStrategy;
     this.sideInputViews = null;
     this.inputCoder = null;
   }
 
-  public DoFnInfo(DoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy,
+  public DoFnInfo(OldDoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy,
                   Iterable<PCollectionView<?>> sideInputViews, Coder<InputT> inputCoder) {
     this.doFn = doFn;
     this.windowingStrategy = windowingStrategy;
@@ -51,7 +51,7 @@ public class DoFnInfo<InputT, OutputT> implements Serializable {
     this.inputCoder = inputCoder;
   }
 
-  public DoFn<InputT, OutputT> getDoFn() {
+  public OldDoFn<InputT, OutputT> getDoFn() {
     return doFn;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
index 7d89735..2a01c03 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
@@ -49,7 +49,7 @@ import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -506,7 +506,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
   }
 
   /**
-   * Returns a Step for a DoFn by creating and translating a pipeline.
+   * Returns a Step for a OldDoFn by creating and translating a pipeline.
    */
   private static Step createPredefinedStep() throws Exception {
     DataflowPipelineOptions options = buildPipelineOptions();
@@ -530,7 +530,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
     return step;
   }
 
-  private static class NoOpFn extends DoFn<String, String> {
+  private static class NoOpFn extends OldDoFn<String, String> {
     @Override public void processElement(ProcessContext c) throws Exception {
       c.output(c.element());
     }
@@ -864,7 +864,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
     DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);
     Pipeline pipeline = Pipeline.create(options);
 
-    DoFn<Integer, Integer> fn1 = new DoFn<Integer, Integer>() {
+    OldDoFn<Integer, Integer> fn1 = new OldDoFn<Integer, Integer>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         c.output(c.element());
@@ -880,7 +880,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
       }
     };
 
-    DoFn<Integer, Integer> fn2 = new DoFn<Integer, Integer>() {
+    OldDoFn<Integer, Integer> fn2 = new OldDoFn<Integer, Integer>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
index 4951043..0677030 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
@@ -25,8 +25,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -44,7 +44,7 @@ public class WordCount {
    * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
    * pipeline.
    */
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
index b5888bd..f4ce516 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
@@ -19,7 +19,7 @@
 package org.apache.beam.runners.spark.translation;
 
 import org.apache.beam.runners.spark.util.BroadcastHelper;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
@@ -39,7 +39,7 @@ import java.util.Map;
 public class DoFnFunction<InputT, OutputT>
     implements FlatMapFunction<Iterator<WindowedValue<InputT>>,
     WindowedValue<OutputT>> {
-  private final DoFn<InputT, OutputT> mFunction;
+  private final OldDoFn<InputT, OutputT> mFunction;
   private final SparkRuntimeContext mRuntimeContext;
   private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
 
@@ -48,7 +48,7 @@ public class DoFnFunction<InputT, OutputT>
    * @param runtime    Runtime to apply function in.
    * @param sideInputs Side inputs used in DoFunction.
    */
-  public DoFnFunction(DoFn<InputT, OutputT> fn,
+  public DoFnFunction(OldDoFn<InputT, OutputT> fn,
                SparkRuntimeContext runtime,
                Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
     this.mFunction = fn;
@@ -69,7 +69,7 @@ public class DoFnFunction<InputT, OutputT>
 
     private final List<WindowedValue<OutputT>> outputs = new LinkedList<>();
 
-    ProcCtxt(DoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
+    ProcCtxt(OldDoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
         BroadcastHelper<?>> sideInputs) {
       super(fn, runtimeContext, sideInputs);
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
index daa767d..e33578d 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
@@ -19,7 +19,7 @@
 package org.apache.beam.runners.spark.translation;
 
 import org.apache.beam.runners.spark.util.BroadcastHelper;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
 
@@ -45,13 +45,13 @@ import scala.Tuple2;
  */
 class MultiDoFnFunction<InputT, OutputT>
     implements PairFlatMapFunction<Iterator<WindowedValue<InputT>>, TupleTag<?>, WindowedValue<?>> {
-  private final DoFn<InputT, OutputT> mFunction;
+  private final OldDoFn<InputT, OutputT> mFunction;
   private final SparkRuntimeContext mRuntimeContext;
   private final TupleTag<OutputT> mMainOutputTag;
   private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
 
   MultiDoFnFunction(
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       SparkRuntimeContext runtimeContext,
       TupleTag<OutputT> mainOutputTag,
       Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
@@ -75,7 +75,7 @@ class MultiDoFnFunction<InputT, OutputT>
 
     private final Multimap<TupleTag<?>, WindowedValue<?>> outputs = LinkedListMultimap.create();
 
-    ProcCtxt(DoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
+    ProcCtxt(OldDoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
         BroadcastHelper<?>> sideInputs) {
       super(fn, runtimeContext, sideInputs);
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
index cad2a8e..58ac03c 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.TimerInternals;
@@ -50,17 +50,17 @@ import java.util.Map;
  * Spark runner process context.
  */
 public abstract class SparkProcessContext<InputT, OutputT, ValueT>
-    extends DoFn<InputT, OutputT>.ProcessContext {
+    extends OldDoFn<InputT, OutputT>.ProcessContext {
 
   private static final Logger LOG = LoggerFactory.getLogger(SparkProcessContext.class);
 
-  private final DoFn<InputT, OutputT> fn;
+  private final OldDoFn<InputT, OutputT> fn;
   private final SparkRuntimeContext mRuntimeContext;
   private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
 
   protected WindowedValue<InputT> windowedValue;
 
-  SparkProcessContext(DoFn<InputT, OutputT> fn,
+  SparkProcessContext(OldDoFn<InputT, OutputT> fn,
       SparkRuntimeContext runtime,
       Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
     fn.super();
@@ -135,9 +135,9 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
 
   @Override
   public BoundedWindow window() {
-    if (!(fn instanceof DoFn.RequiresWindowAccess)) {
+    if (!(fn instanceof OldDoFn.RequiresWindowAccess)) {
       throw new UnsupportedOperationException(
-          "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+          "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
     }
     return Iterables.getOnlyElement(windowedValue.getWindows());
   }
@@ -200,7 +200,7 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
   protected abstract Iterator<ValueT> getOutputIterator();
 
   protected Iterable<ValueT> getOutputIterable(final Iterator<WindowedValue<InputT>> iter,
-                                               final DoFn<InputT, OutputT> doFn) {
+                                               final OldDoFn<InputT, OutputT> doFn) {
     return new Iterable<ValueT>() {
       @Override
       public Iterator<ValueT> iterator() {
@@ -212,11 +212,11 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
   private class ProcCtxtIterator extends AbstractIterator<ValueT> {
 
     private final Iterator<WindowedValue<InputT>> inputIterator;
-    private final DoFn<InputT, OutputT> doFn;
+    private final OldDoFn<InputT, OutputT> doFn;
     private Iterator<ValueT> outputIterator;
     private boolean calledFinish;
 
-    ProcCtxtIterator(Iterator<WindowedValue<InputT>> iterator, DoFn<InputT, OutputT> doFn) {
+    ProcCtxtIterator(Iterator<WindowedValue<InputT>> iterator, OldDoFn<InputT, OutputT> doFn) {
       this.inputIterator = iterator;
       this.doFn = doFn;
       this.outputIterator = getOutputIterator();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
index c5d5802..c51a500 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
@@ -39,8 +39,8 @@ import org.apache.beam.sdk.io.AvroIO;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
@@ -94,6 +94,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+
 import scala.Tuple2;
 
 /**
@@ -203,7 +204,7 @@ public final class TransformTranslator {
         WindowingStrategy<?, W> windowingStrategy =
             (WindowingStrategy<?, W>) transform.getWindowingStrategy();
 
-        DoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<V>>> gabwDoFn =
+        OldDoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<V>>> gabwDoFn =
             new GroupAlsoByWindowsViaOutputBufferDoFn<K, V, Iterable<V>, W>(
                 windowingStrategy,
                 new InMemoryStateInternalsFactory<K>(),
@@ -768,7 +769,7 @@ public final class TransformTranslator {
                 && windowFn instanceof GlobalWindows)) {
           context.setOutputRDD(transform, inRDD);
         } else {
-          DoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
+          OldDoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
           DoFnFunction<T, T> dofn =
               new DoFnFunction<>(addWindowsDoFn, context.getRuntimeContext(), null);
           context.setOutputRDD(transform, inRDD.mapPartitions(dofn));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
index 8154cd7..b0fb931 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
@@ -32,8 +32,8 @@ import org.apache.beam.sdk.io.AvroIO;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
@@ -315,7 +315,7 @@ public final class StreamingTransformTranslator {
           sec.setStream(transform, dStream.window(windowDuration, slideDuration));
         }
         //--- then we apply windowing to the elements
-        DoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
+        OldDoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
         DoFnFunction<T, T> dofn = new DoFnFunction<>(addWindowsDoFn,
             ((StreamingEvaluationContext) context).getRuntimeContext(), null);
         @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
index d1f8d12..e4a293f 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -101,7 +101,7 @@ public class TfIdfTest {
       // of the words in the document associated with that that URI.
       PCollection<KV<URI, String>> uriToWords = uriToContent
           .apply("SplitWords", ParDo.of(
-              new DoFn<KV<URI, String>, KV<URI, String>>() {
+              new OldDoFn<KV<URI, String>, KV<URI, String>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
@@ -144,7 +144,7 @@ public class TfIdfTest {
       // by the URI key.
       PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
           .apply("ShiftKeys", ParDo.of(
-              new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+              new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey().getKey();
@@ -183,7 +183,7 @@ public class TfIdfTest {
       // divided by the total number of words in the document.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
           .apply("ComputeTermFrequencies", ParDo.of(
-              new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
@@ -208,7 +208,7 @@ public class TfIdfTest {
       PCollection<KV<String, Double>> wordToDf = wordToDocCount
           .apply("ComputeDocFrequencies", ParDo
               .withSideInputs(totalDocuments)
-              .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+              .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();
@@ -237,7 +237,7 @@ public class TfIdfTest {
       // divided by the log of the document frequency.
       return wordToUriAndTfAndDf
           .apply("ComputeTfIdf", ParDo.of(
-              new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
index 600217d..2e477e9 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.coders.VarLongCoder;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -70,7 +70,7 @@ public class CombinePerKeyTest {
     private static class SumPerKey<T> extends PTransform<PCollection<T>, PCollection<KV<T, Long>>> {
       @Override
       public PCollection<KV<T, Long>> apply(PCollection<T> pcol) {
-          PCollection<KV<T, Long>> withLongs = pcol.apply(ParDo.of(new DoFn<T, KV<T, Long>>() {
+          PCollection<KV<T, Long>> withLongs = pcol.apply(ParDo.of(new OldDoFn<T, KV<T, Long>>() {
               @Override
               public void processElement(ProcessContext processContext) throws Exception {
                   processContext.output(KV.of(processContext.element(), 1L));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
index 0f60271..263ce99 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PCollection;
 
@@ -46,7 +46,7 @@ public class DoFnOutputTest implements Serializable {
     PCollection<String> strings = pipeline.apply(Create.of("a"));
     // Test that values written from startBundle() and finishBundle() are written to
     // the output
-    PCollection<String> output = strings.apply(ParDo.of(new DoFn<String, String>() {
+    PCollection<String> output = strings.apply(ParDo.of(new OldDoFn<String, String>() {
       @Override
       public void startBundle(Context c) throws Exception {
         c.output("start");

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
index ded3eb2..739eec3 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.ApproximateUnique;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -103,9 +103,9 @@ public class MultiOutputWordCountTest {
   }
 
   /**
-   * A DoFn that tokenizes lines of text into individual words.
+   * A OldDoFn that tokenizes lines of text into individual words.
    */
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
 
     private final Aggregator<Integer, Integer> totalWords = createAggregator("totalWords",
         new Sum.SumIntegerFn());
@@ -170,7 +170,7 @@ public class MultiOutputWordCountTest {
     }
   }
 
-  private static class FormatCountsFn extends DoFn<KV<String, Long>, String> {
+  private static class FormatCountsFn extends OldDoFn<KV<String, Long>, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().getKey() + ": " + c.element().getValue());



[10/51] [abbrv] incubator-beam git commit: Update Dataflow runner to worker container supporting OldDoFn

Posted by ke...@apache.org.
Update Dataflow runner to worker container supporting OldDoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3466a0e7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3466a0e7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3466a0e7

Branch: refs/heads/python-sdk
Commit: 3466a0e72d1f025c2d4562635a20730778b497b0
Parents: e07c339
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 17:47:23 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/runners/dataflow/DataflowRunner.java    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3466a0e7/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index d762d50..abcf415 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -212,9 +212,9 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
   // Default Docker container images that execute Dataflow worker harness, residing in Google
   // Container Registry, separately for Batch and Streaming.
   public static final String BATCH_WORKER_HARNESS_CONTAINER_IMAGE =
-      "dataflow.gcr.io/v1beta3/beam-java-batch:beam-master-20160714";
+      "dataflow.gcr.io/v1beta3/beam-java-batch:beam-master-20160804-dofn";
   public static final String STREAMING_WORKER_HARNESS_CONTAINER_IMAGE =
-      "dataflow.gcr.io/v1beta3/beam-java-streaming:beam-master-20160714";
+      "dataflow.gcr.io/v1beta3/beam-java-streaming:beam-master-20160804-dofn";
 
   // The limit of CreateJob request size.
   private static final int CREATE_JOB_REQUEST_LIMIT_BYTES = 10 * 1024 * 1024;


[19/51] [abbrv] incubator-beam git commit: Closes #743

Posted by ke...@apache.org.
Closes #743


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3144363c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3144363c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3144363c

Branch: refs/heads/python-sdk
Commit: 3144363c067e8bd5d1b83aca2f4057b6bc33ed04
Parents: c314e67 ac0875d
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 23:01:15 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 23:01:15 2016 -0700

----------------------------------------------------------------------
 .../runners/spark/SparkPipelineOptions.java     |  6 ++++
 .../apache/beam/runners/spark/SparkRunner.java  | 14 +++------
 .../runners/spark/SparkRunnerRegistrar.java     |  6 ++--
 .../spark/SparkStreamingPipelineOptions.java    | 32 --------------------
 .../beam/runners/spark/TestSparkRunner.java     |  2 --
 .../apache/beam/runners/spark/DeDupTest.java    |  2 +-
 .../beam/runners/spark/EmptyInputTest.java      |  2 +-
 .../beam/runners/spark/SimpleWordCountTest.java |  4 +--
 .../runners/spark/SparkRunnerRegistrarTest.java |  2 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  2 +-
 .../beam/runners/spark/io/AvroPipelineTest.java |  2 +-
 .../beam/runners/spark/io/NumShardsTest.java    |  2 +-
 .../io/hadoop/HadoopFileFormatPipelineTest.java |  2 +-
 .../spark/translation/CombineGloballyTest.java  |  2 +-
 .../spark/translation/CombinePerKeyTest.java    |  2 +-
 .../spark/translation/DoFnOutputTest.java       |  6 ++--
 .../translation/MultiOutputWordCountTest.java   |  2 +-
 .../spark/translation/SerializationTest.java    |  2 +-
 .../spark/translation/SideEffectsTest.java      |  8 ++---
 .../streaming/FlattenStreamingTest.java         |  8 ++---
 .../streaming/KafkaStreamingTest.java           | 13 ++++----
 .../streaming/SimpleStreamingWordCountTest.java |  8 ++---
 .../streaming/utils/EmbeddedKafkaCluster.java   |  4 ++-
 23 files changed, 49 insertions(+), 84 deletions(-)
----------------------------------------------------------------------



[35/51] [abbrv] incubator-beam git commit: Port join library to new DoFn

Posted by ke...@apache.org.
Port join library to new DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/620bd994
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/620bd994
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/620bd994

Branch: refs/heads/python-sdk
Commit: 620bd9949a6176ddd1903687fe9b8ba8c5822367
Parents: a1c06d7
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 19:55:21 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../apache/beam/sdk/extensions/joinlibrary/Join.java  | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/620bd994/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
index 88836f9..f4e6ccb 100644
--- a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
+++ b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.extensions.joinlibrary;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -59,8 +59,8 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
-        @Override
+      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+        @ProcessElement
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();
 
@@ -108,8 +108,8 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
-        @Override
+      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+        @ProcessElement
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();
 
@@ -161,8 +161,8 @@ public class Join {
         .apply(CoGroupByKey.<K>create());
 
     return coGbkResultCollection.apply(ParDo.of(
-      new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
-        @Override
+      new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+        @ProcessElement
         public void processElement(ProcessContext c) {
           KV<K, CoGbkResult> e = c.element();
 


[03/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
index 77c857c..7917aec 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
@@ -23,8 +23,8 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import org.apache.beam.sdk.coders.AtomicCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.VarInt;
 
 import com.google.common.base.MoreObjects;
@@ -38,8 +38,8 @@ import java.util.Objects;
 /**
  * Provides information about the pane an element belongs to. Every pane is implicitly associated
  * with a window. Panes are observable only via the
- * {@link org.apache.beam.sdk.transforms.DoFn.ProcessContext#pane} method of the context
- * passed to a {@link DoFn#processElement} overridden method.
+ * {@link OldDoFn.ProcessContext#pane} method of the context
+ * passed to a {@link OldDoFn#processElement} overridden method.
  *
  * <p>Note: This does not uniquely identify a pane, and should not be used for comparisons.
  */
@@ -74,8 +74,8 @@ public final class PaneInfo {
    * definitions:
    * <ol>
    * <li>We'll call a pipeline 'simple' if it does not use
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#outputWithTimestamp} in
-   * any {@code DoFn}, and it uses the same
+   * {@link OldDoFn.Context#outputWithTimestamp} in
+   * any {@code OldDoFn}, and it uses the same
    * {@link org.apache.beam.sdk.transforms.windowing.Window.Bound#withAllowedLateness}
    * argument value on all windows (or uses the default of {@link org.joda.time.Duration#ZERO}).
    * <li>We'll call an element 'locally late', from the point of view of a computation on a

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
index fe8b66f..03ff481 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
@@ -21,8 +21,8 @@ import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.annotations.Experimental.Kind;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -645,7 +645,7 @@ public class Window {
           // We first apply a (trivial) transform to the input PCollection to produce a new
           // PCollection. This ensures that we don't modify the windowing strategy of the input
           // which may be used elsewhere.
-          .apply("Identity", ParDo.of(new DoFn<T, T>() {
+          .apply("Identity", ParDo.of(new OldDoFn<T, T>() {
             @Override public void processElement(ProcessContext c) {
               c.output(c.element());
             }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
index a62444f..dd36367 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
@@ -107,7 +107,7 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+   * {@link OldDoFn.Context#output}
    * is called.
    */
   @Override
@@ -115,7 +115,7 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+   * {@link OldDoFn.Context#sideOutput}
    * is called.
    */
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
index ce35c24..e14aec8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
@@ -21,6 +21,7 @@ package org.apache.beam.sdk.util;
 import static com.google.common.base.Preconditions.checkState;
 
 import org.apache.beam.sdk.transforms.Combine;
+
 import java.util.HashMap;
 import java.util.Map;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
index f73fae3..149d276 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.CombineWithContext.Context;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.state.StateContext;
 import org.apache.beam.sdk.values.PCollectionView;
@@ -49,9 +49,9 @@ public class CombineContextFactory {
   }
 
   /**
-   * Returns a {@code Combine.Context} that wraps a {@code DoFn.ProcessContext}.
+   * Returns a {@code Combine.Context} that wraps a {@code OldDoFn.ProcessContext}.
    */
-  public static Context createFromProcessContext(final DoFn<?, ?>.ProcessContext c) {
+  public static Context createFromProcessContext(final OldDoFn<?, ?>.ProcessContext c) {
     return new Context() {
       @Override
       public PipelineOptions getPipelineOptions() {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
index 01bde82..1c2f554 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
@@ -42,14 +42,14 @@ public interface ExecutionContext {
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+   * {@link OldDoFn.Context#output}
    * is called.
    */
   void noteOutput(WindowedValue<?> output);
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+   * {@link OldDoFn.Context#sideOutput}
    * is called.
    */
   void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);
@@ -71,14 +71,14 @@ public interface ExecutionContext {
 
     /**
      * Hook for subclasses to implement that will be called whenever
-     * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+     * {@link OldDoFn.Context#output}
      * is called.
      */
     void noteOutput(WindowedValue<?> output);
 
     /**
      * Hook for subclasses to implement that will be called whenever
-     * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+     * {@link OldDoFn.Context#sideOutput}
      * is called.
      */
     void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
index 96802ae..eb0a91a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
@@ -21,6 +21,7 @@ package org.apache.beam.sdk.util;
 import static com.google.common.base.Preconditions.checkArgument;
 
 import org.apache.beam.sdk.transforms.Combine;
+
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
index 9dc4f68..ae3d391 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 
 import java.io.Serializable;
@@ -43,62 +43,62 @@ public interface PerKeyCombineFnRunner<K, InputT, AccumT, OutputT> extends Seria
   /////////////////////////////////////////////////////////////////////////////
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to create the accumulator in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to create the accumulator in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c);
+  public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c);
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to add the input in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to add the input in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public AccumT addInput(K key, AccumT accumulator, InputT input, DoFn<?, ?>.ProcessContext c);
+  public AccumT addInput(K key, AccumT accumulator, InputT input, OldDoFn<?, ?>.ProcessContext c);
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to merge accumulators in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to merge accumulators in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
   public AccumT mergeAccumulators(
-      K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c);
+      K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c);
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to extract the output in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to extract the output in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c);
+  public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c);
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to compact the accumulator in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to compact the accumulator in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c);
+  public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c);
 
   /**
    * Forwards the call to a {@link PerKeyCombineFn} to combine the inputs and extract output
-   * in a {@link DoFn}.
+   * in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c);
+  public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c);
 
   /**
-   * Forwards the call to a {@link PerKeyCombineFn} to add all inputs in a {@link DoFn}.
+   * Forwards the call to a {@link PerKeyCombineFn} to add all inputs in a {@link OldDoFn}.
    *
-   * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+   * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
    * if it is required.
    */
-  public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c);
+  public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c);
 
   /////////////////////////////////////////////////////////////////////////////
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
index 2d28682..87870a8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn;
 import org.apache.beam.sdk.transforms.CombineWithContext;
 import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext;
 import org.apache.beam.sdk.transforms.CombineWithContext.RequiresContextInternal;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 
 import com.google.common.collect.Iterables;
@@ -69,39 +69,39 @@ public class PerKeyCombineFnRunners {
     }
 
     @Override
-    public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c) {
+    public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.createAccumulator(key);
     }
 
     @Override
     public AccumT addInput(
-        K key, AccumT accumulator, InputT input, DoFn<?, ?>.ProcessContext c) {
+        K key, AccumT accumulator, InputT input, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.addInput(key, accumulator, input);
     }
 
     @Override
     public AccumT mergeAccumulators(
-        K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c) {
+        K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.mergeAccumulators(key, accumulators);
     }
 
     @Override
-    public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+    public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.extractOutput(key, accumulator);
     }
 
     @Override
-    public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+    public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.compact(key, accumulator);
     }
 
     @Override
-    public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+    public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFn.apply(key, inputs);
     }
 
     @Override
-    public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+    public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
       AccumT accum = keyedCombineFn.createAccumulator(key);
       for (InputT input : inputs) {
         accum = keyedCombineFn.addInput(key, accum, input);
@@ -165,45 +165,45 @@ public class PerKeyCombineFnRunners {
     }
 
     @Override
-    public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c) {
+    public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.createAccumulator(key,
           CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
     public AccumT addInput(
-        K key, AccumT accumulator, InputT value, DoFn<?, ?>.ProcessContext c) {
+        K key, AccumT accumulator, InputT value, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.addInput(key, accumulator, value,
           CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
     public AccumT mergeAccumulators(
-        K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c) {
+        K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.mergeAccumulators(
           key, accumulators, CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
-    public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+    public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.extractOutput(key, accumulator,
           CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
-    public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+    public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.compact(key, accumulator,
           CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
-    public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+    public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
       return keyedCombineFnWithContext.apply(key, inputs,
           CombineContextFactory.createFromProcessContext(c));
     }
 
     @Override
-    public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+    public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
       CombineWithContext.Context combineContext = CombineContextFactory.createFromProcessContext(c);
       AccumT accum = keyedCombineFnWithContext.createAccumulator(key, combineContext);
       for (InputT input : inputs) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
index 36c4a9f..9e6c7d2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
@@ -34,6 +34,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ThreadLocalRandom;
+
 import javax.annotation.Nullable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
index 9fa0380..88ae6cc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
@@ -34,6 +34,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import javax.annotation.Nullable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
index c2273f5..2808ca9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
@@ -17,11 +17,11 @@
  */
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.values.KV;
 
 /**
- * DoFn that makes timestamps and window assignments explicit in the value part of each key/value
+ * OldDoFn that makes timestamps and window assignments explicit in the value part of each key/value
  * pair.
  *
  * @param <K> the type of the keys of the input and output {@code PCollection}s
@@ -29,7 +29,7 @@ import org.apache.beam.sdk.values.KV;
  */
 @SystemDoFnInternal
 public class ReifyTimestampAndWindowsDoFn<K, V>
-    extends DoFn<KV<K, V>, KV<K, WindowedValue<V>>> {
+    extends OldDoFn<KV<K, V>, KV<K, WindowedValue<V>>> {
   @Override
   public void processElement(ProcessContext c)
       throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
index 6c58689..66c7cc0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
@@ -17,8 +17,8 @@
  */
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -70,7 +70,7 @@ public class Reshuffle<K, V> extends PTransform<PCollection<KV<K, V>>, PCollecti
         // set allowed lateness.
         .setWindowingStrategyInternal(originalStrategy)
         .apply("ExpandIterable", ParDo.of(
-            new DoFn<KV<K, Iterable<V>>, KV<K, V>>() {
+            new OldDoFn<KV<K, Iterable<V>>, KV<K, V>>() {
               @Override
               public void processElement(ProcessContext c) {
                 K key = c.element().getKey();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
index 45f6c4a..1e70aaf 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
@@ -105,7 +105,7 @@ public class SerializableUtils {
    */
   public static CloudObject ensureSerializable(Coder<?> coder) {
     // Make sure that Coders are java serializable as well since
-    // they are regularly captured within DoFn's.
+    // they are regularly captured within OldDoFn's.
     Coder<?> copy = (Coder<?>) ensureSerializable((Serializable) coder);
 
     CloudObject cloudObject = copy.asCloudObject();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
index 53201a4..bb59373 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
@@ -98,7 +98,7 @@ public class StringUtils {
   }
 
   private static final String[] STANDARD_NAME_SUFFIXES =
-      new String[]{"DoFn", "Fn"};
+      new String[]{"OldDoFn", "Fn"};
 
   /**
    * Pattern to match a non-anonymous inner class.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
index 9a42b23..b8a5cd4 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
@@ -17,7 +17,7 @@
  */
 package org.apache.beam.sdk.util;
 
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.ElementType;
@@ -26,10 +26,10 @@ import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
 /**
- * Annotation to mark {@link DoFn DoFns} as an internal component of the Dataflow SDK.
+ * Annotation to mark {@link OldDoFn DoFns} as an internal component of the Dataflow SDK.
  *
  * <p>Currently, the only effect of this is to mark any aggregators reported by an annotated
- * {@code DoFn} as a system counter (as opposed to a user counter).
+ * {@code OldDoFn} as a system counter (as opposed to a user counter).
  *
  * <p>This is internal to the Dataflow SDK.
  */

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
index c03ab4d..3212d64 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
@@ -33,7 +33,6 @@ import com.google.common.base.MoreObjects;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.joda.time.Instant;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
index e724349..f0e4812 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
@@ -22,7 +22,7 @@ import static com.google.common.base.Preconditions.checkArgument;
 import org.apache.beam.sdk.coders.ByteArrayCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.StandardCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 
 import com.google.common.base.MoreObjects;
 
@@ -139,8 +139,8 @@ public class ValueWithRecordId<ValueT> {
     ByteArrayCoder idCoder;
   }
 
-  /** {@link DoFn} to turn a {@code ValueWithRecordId<T>} back to the value {@code T}. */
-  public static class StripIdsDoFn<T> extends DoFn<ValueWithRecordId<T>, T> {
+  /** {@link OldDoFn} to turn a {@code ValueWithRecordId<T>} back to the value {@code T}. */
+  public static class StripIdsDoFn<T> extends OldDoFn<ValueWithRecordId<T>, T> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
index 676848c..9d341a1 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
@@ -38,7 +38,6 @@ import com.google.common.collect.ImmutableList;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.joda.time.Instant;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
index 149c497..3a1b654 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
@@ -30,8 +30,8 @@ import java.io.IOException;
 import java.util.Collection;
 
 /**
- * Interface that may be required by some (internal) {@code DoFn}s to implement windowing. It should
- * not be necessary for general user code to interact with this at all.
+ * Interface that may be required by some (internal) {@code OldDoFn}s to implement windowing. It
+ * should not be necessary for general user code to interact with this at all.
  *
  * <p>This interface should be provided by runner implementors to support windowing on their runner.
  *

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
index 75b8ad8..6db532e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.util.common;
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
+
 import static java.util.Arrays.asList;
 
 import com.google.common.base.Function;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
index b60a53e..69bf77d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
@@ -27,7 +27,6 @@ import org.apache.beam.sdk.util.PropertyNames;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.joda.time.Instant;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
index 8abfb05..5137031 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.UserCodeException;
@@ -146,9 +146,9 @@ public class PipelineTest {
 
   private static PTransform<PCollection<? extends String>, PCollection<String>> addSuffix(
       final String suffix) {
-    return ParDo.of(new DoFn<String, String>() {
+    return ParDo.of(new OldDoFn<String, String>() {
       @Override
-      public void processElement(DoFn<String, String>.ProcessContext c) {
+      public void processElement(OldDoFn<String, String>.ProcessContext c) {
         c.output(c.element() + suffix);
       }
     });

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
index 54f7ec1..41d0932 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
 import org.apache.beam.sdk.util.SerializableUtils;
@@ -134,7 +134,7 @@ public class AvroCoderTest {
     }
   }
 
-  private static class GetTextFn extends DoFn<Pojo, String> {
+  private static class GetTextFn extends OldDoFn<Pojo, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().text);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
index 817ea20..35ec6c6 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.coders.protobuf.ProtoCoder;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
@@ -366,7 +366,7 @@ public class CoderRegistryTest {
   private static class PTransformOutputingMySerializableGeneric
   extends PTransform<PCollection<String>, PCollection<KV<String, MySerializableGeneric<String>>>> {
 
-    private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<String>>> {
+    private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<String>>> {
       @Override
       public void processElement(ProcessContext c) { }
     }
@@ -430,7 +430,7 @@ public class CoderRegistryTest {
       PCollection<String>,
       PCollection<KV<String, MySerializableGeneric<T>>>> {
 
-    private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<T>>> {
+    private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<T>>> {
       @Override
       public void processElement(ProcessContext c) { }
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
index d6423e5..3e7fd50 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.CloudObject;
 import org.apache.beam.sdk.util.CoderUtils;
@@ -82,14 +82,14 @@ public class SerializableCoderTest implements Serializable {
     }
   }
 
-  static class StringToRecord extends DoFn<String, MyRecord> {
+  static class StringToRecord extends OldDoFn<String, MyRecord> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(new MyRecord(c.element()));
     }
   }
 
-  static class RecordToString extends DoFn<MyRecord, String> {
+  static class RecordToString extends OldDoFn<MyRecord, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().value);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
index c7153f8..09405ab 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
index cabfc21..fe9415b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
index 8fbed94..01e5fe5 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.not;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
index c5f7478..95f7454 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.Matchers.is;
 import static org.junit.Assert.assertThat;
 
@@ -28,9 +29,9 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -119,7 +120,7 @@ public class CountingInputTest {
     assertThat(endTime.isAfter(startTime.plus(expectedRuntimeMillis)), is(true));
   }
 
-  private static class ElementValueDiff extends DoFn<Long, Long> {
+  private static class ElementValueDiff extends OldDoFn<Long, Long> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
index 321f066..45f636f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
@@ -34,10 +34,10 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -159,7 +159,7 @@ public class CountingSourceTest {
     p.run();
   }
 
-  private static class ElementValueDiff extends DoFn<Long, Long> {
+  private static class ElementValueDiff extends OldDoFn<Long, Long> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
index 7009023..f689f51 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionExhaustive;
 import static org.apache.beam.sdk.testing.SourceTestUtils.readFromSource;
+
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
index 9c75972..f8592c9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.testing.CoderProperties;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.PubsubClient;
 import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage;
@@ -58,7 +58,7 @@ public class PubsubUnboundedSinkTest {
   private static final String ID_LABEL = "id";
   private static final int NUM_SHARDS = 10;
 
-  private static class Stamp extends DoFn<String, String> {
+  private static class Stamp extends OldDoFn<String, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.outputWithTimestamp(c.element(), new Instant(TIMESTAMP));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
index 237c025..a47ddf2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
@@ -37,6 +37,7 @@ import org.junit.runners.JUnit4;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
+
 import javax.annotation.Nullable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
index a1f1f70..6ec3a71 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
@@ -23,6 +23,7 @@ import static org.apache.beam.sdk.TestUtils.NO_INTS_ARRAY;
 import static org.apache.beam.sdk.TestUtils.NO_LINES_ARRAY;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
+
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.startsWith;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
index 0af0744..4b6e749 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
@@ -40,9 +41,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactoryTest.TestPipelineOption
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -101,14 +102,14 @@ public class WriteTest {
       this.window = window;
     }
 
-    private static class AddArbitraryKey<T> extends DoFn<T, KV<Integer, T>> {
+    private static class AddArbitraryKey<T> extends OldDoFn<T, KV<Integer, T>> {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         c.output(KV.of(ThreadLocalRandom.current().nextInt(), c.element()));
       }
     }
 
-    private static class RemoveArbitraryKey<T> extends DoFn<KV<Integer, Iterable<T>>, T> {
+    private static class RemoveArbitraryKey<T> extends OldDoFn<KV<Integer, Iterable<T>>, T> {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         for (T s : c.element().getValue()) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
index 98aee4e..ea0db73 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
@@ -46,6 +46,7 @@ import java.nio.channels.WritableByteChannel;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlType;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
index 22359dc..ec2902e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.options;
 
 import static com.google.common.base.Strings.isNullOrEmpty;
+
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
index 546fe7d..8e1439b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
@@ -29,7 +29,6 @@ import com.google.api.services.bigquery.Bigquery.Datasets.Delete;
 import com.google.api.services.storage.Storage;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
index 8b8337e..0c1b596 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
@@ -43,7 +43,6 @@ import com.google.common.collect.ListMultimap;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.hamcrest.Matchers;
 import org.junit.Rule;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
index 687271c..b2efa61 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
@@ -28,7 +28,6 @@ import com.google.common.collect.Sets;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
index 110f30a..c4c5c1c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasNamespace;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.hasItem;
@@ -43,7 +44,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.hamcrest.Matchers;
 import org.joda.time.Instant;
 import org.junit.Rule;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
index 74cc5e0..13476e2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
@@ -27,9 +27,9 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.Pipeline.PipelineVisitor;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -211,7 +211,7 @@ public class AggregatorPipelineExtractorTest {
     }
   }
 
-  private static class AggregatorProvidingDoFn<InT, OuT> extends DoFn<InT, OuT> {
+  private static class AggregatorProvidingDoFn<InT, OuT> extends OldDoFn<InT, OuT> {
     public <InputT, OutT> Aggregator<InputT, OutT> addAggregator(
         CombineFn<InputT, ?, OutT> combiner) {
       return createAggregator(randomName(), combiner);
@@ -222,7 +222,7 @@ public class AggregatorPipelineExtractorTest {
     }
 
     @Override
-    public void processElement(DoFn<InT, OuT>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<InT, OuT>.ProcessContext c) throws Exception {
       fail();
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
index 1070dab..acc2b48 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
@@ -39,7 +39,6 @@ import org.apache.beam.sdk.values.TimestampedValue;
 import com.google.common.collect.Iterables;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.junit.Rule;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
index 043c06c..0bd7893 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
@@ -31,7 +31,6 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.hamcrest.BaseMatcher;
 import org.hamcrest.Description;
 import org.junit.Rule;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
index 8c2451b..fc10d4b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.TestUtils.checkCombineFn;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
index 1a42947..5c8732f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -53,7 +54,7 @@ import java.util.List;
  */
 @RunWith(JUnit4.class)
 public class ApproximateUniqueTest implements Serializable {
-  // implements Serializable just to make it easy to use anonymous inner DoFn subclasses
+  // implements Serializable just to make it easy to use anonymous inner OldDoFn subclasses
 
   @Test
   public void testEstimationErrorToSampleSize() {
@@ -222,7 +223,7 @@ public class ApproximateUniqueTest implements Serializable {
             .apply(View.<Long>asSingleton());
 
     PCollection<KV<Long, Long>> approximateAndExact = approximate
-        .apply(ParDo.of(new DoFn<Long, KV<Long, Long>>() {
+        .apply(ParDo.of(new OldDoFn<Long, KV<Long, Long>>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.output(KV.of(c.element(), c.sideInput(exact)));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
index 486c738..d6bf826 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
@@ -461,7 +461,7 @@ public class  CombineFnsTest {
   }
 
   private static class ExtractResultDoFn
-      extends DoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>>{
+      extends OldDoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>> {
 
     private final TupleTag<Integer> maxIntTag;
     private final TupleTag<UserString> concatStringTag;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
index b453089..cb9928e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
@@ -25,6 +25,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.include
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+
 import static org.hamcrest.Matchers.hasItem;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
@@ -73,7 +74,6 @@ import com.google.common.collect.Sets;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.hamcrest.Matchers;
 import org.joda.time.Duration;
 import org.junit.Test;
@@ -117,7 +117,7 @@ public class CombineTest implements Serializable {
     1, 1, 2, 3, 5, 8, 13, 21, 34, 55
   };
 
-  @Mock private DoFn<?, ?>.ProcessContext processContext;
+  @Mock private OldDoFn<?, ?>.ProcessContext processContext;
 
   PCollection<KV<String, Integer>> createInput(Pipeline p,
                                                KV<String, Integer>[] table) {
@@ -372,7 +372,7 @@ public class CombineTest implements Serializable {
     pipeline.run();
   }
 
-  private static class FormatPaneInfo extends DoFn<Integer, String> {
+  private static class FormatPaneInfo extends OldDoFn<Integer, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element() + ": " + c.pane().isLast());
@@ -560,7 +560,7 @@ public class CombineTest implements Serializable {
     pipeline.run();
   }
 
-  private static class GetLast extends DoFn<Integer, Integer> {
+  private static class GetLast extends OldDoFn<Integer, Integer> {
     @Override
     public void processElement(ProcessContext c) {
       if (c.pane().isLast()) {
@@ -653,7 +653,7 @@ public class CombineTest implements Serializable {
 
     PCollection<Integer> output = pipeline
         .apply("CreateVoidMainInput", Create.of((Void) null))
-        .apply("OutputSideInput", ParDo.of(new DoFn<Void, Integer>() {
+        .apply("OutputSideInput", ParDo.of(new OldDoFn<Void, Integer>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(c.sideInput(view));
@@ -1176,7 +1176,7 @@ public class CombineTest implements Serializable {
   }
 
   private static <T> PCollection<T> copy(PCollection<T> pc, final int n) {
-    return pc.apply(ParDo.of(new DoFn<T, T>() {
+    return pc.apply(ParDo.of(new OldDoFn<T, T>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         for (int i = 0; i < n; i++) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
index 07ba002..cf65423 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
@@ -229,7 +229,7 @@ public class CreateTest {
     p.run();
   }
 
-  private static class PrintTimestamps extends DoFn<String, String> {
+  private static class PrintTimestamps extends OldDoFn<String, String> {
     @Override
       public void processElement(ProcessContext c) {
       c.output(c.element() + ":" + c.timestamp().getMillis());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
deleted file mode 100644
index 2e588c7..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-/**
- * Tests for {@link DoFn.Context}.
- */
-@RunWith(JUnit4.class)
-public class DoFnContextTest {
-
-  @Mock
-  private Aggregator<Long, Long> agg;
-
-  private DoFn<Object, Object> fn;
-  private DoFn<Object, Object>.Context context;
-
-  @Before
-  public void setup() {
-    MockitoAnnotations.initMocks(this);
-
-    // Need to be real objects to call the constructor, and to reference the
-    // outer instance of DoFn
-    NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
-    DoFn<Object, Object>.Context noOpContext = noOpFn.context();
-
-    fn = spy(noOpFn);
-    context = spy(noOpContext);
-  }
-
-  @Test
-  public void testSetupDelegateAggregatorsCreatesAndLinksDelegateAggregators() {
-    Sum.SumLongFn combiner = new Sum.SumLongFn();
-    Aggregator<Long, Long> delegateAggregator =
-        fn.createAggregator("test", combiner);
-
-    when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
-
-    context.setupDelegateAggregators();
-    delegateAggregator.addValue(1L);
-
-    verify(agg).addValue(1L);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
index bf9899c..2488042 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
@@ -24,7 +24,7 @@ import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.verify;
 
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn.DelegatingAggregator;
+import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
 
 import org.junit.Before;
 import org.junit.Rule;
@@ -36,7 +36,7 @@ import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
 /**
- * Tests for DoFn.DelegatingAggregator.
+ * Tests for OldDoFn.DelegatingAggregator.
  */
 @RunWith(JUnit4.class)
 public class DoFnDelegatingAggregatorTest {
@@ -54,7 +54,7 @@ public class DoFnDelegatingAggregatorTest {
 
   @Test
   public void testAddValueWithoutDelegateThrowsException() {
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
 
     String name = "agg";
     CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
@@ -64,7 +64,7 @@ public class DoFnDelegatingAggregatorTest {
 
     thrown.expect(IllegalStateException.class);
     thrown.expectMessage("cannot be called");
-    thrown.expectMessage("DoFn");
+    thrown.expectMessage("OldDoFn");
 
     aggregator.addValue(21.2);
   }
@@ -74,7 +74,7 @@ public class DoFnDelegatingAggregatorTest {
     String name = "agg";
     CombineFn<Long, ?, Long> combiner = mockCombineFn(Long.class);
 
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
 
     DelegatingAggregator<Long, Long> aggregator =
         (DelegatingAggregator<Long, Long>) doFn.createAggregator(name, combiner);
@@ -91,7 +91,7 @@ public class DoFnDelegatingAggregatorTest {
     String name = "agg";
     CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
 
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
 
     DelegatingAggregator<Double, Double> aggregator =
         (DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -114,7 +114,7 @@ public class DoFnDelegatingAggregatorTest {
     String name = "agg";
     CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
 
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
 
     DelegatingAggregator<Double, Double> aggregator =
         (DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -127,7 +127,7 @@ public class DoFnDelegatingAggregatorTest {
     String name = "agg";
     CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
 
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
 
     DelegatingAggregator<Double, Double> aggregator =
         (DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
index 3238f2c..0cb3d7b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
@@ -158,7 +158,7 @@ public class DoFnReflectorTest {
 
   @Test
   public void testDoFnInvokersReused() throws Exception {
-    // Ensures that we don't create a new Invoker class for every instance of the DoFn.
+    // Ensures that we don't create a new Invoker class for every instance of the OldDoFn.
     IdentityParent fn1 = new IdentityParent();
     IdentityParent fn2 = new IdentityParent();
     DoFnReflector reflector1 = underTest(fn1);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
deleted file mode 100644
index 9242ece..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.isA;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertThat;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
-import org.apache.beam.sdk.PipelineResult;
-import org.apache.beam.sdk.runners.AggregatorValues;
-import org.apache.beam.sdk.testing.NeedsRunner;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
-import org.apache.beam.sdk.transforms.Sum.SumIntegerFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-
-import com.google.common.collect.ImmutableMap;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.Serializable;
-import java.util.Map;
-
-/**
- * Tests for DoFn.
- */
-@RunWith(JUnit4.class)
-public class DoFnTest implements Serializable {
-
-  @Rule
-  public transient ExpectedException thrown = ExpectedException.none();
-
-  @Test
-  public void testCreateAggregatorWithCombinerSucceeds() {
-    String name = "testAggregator";
-    Sum.SumLongFn combiner = new Sum.SumLongFn();
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
-
-    assertEquals(name, aggregator.getName());
-    assertEquals(combiner, aggregator.getCombineFn());
-  }
-
-  @Test
-  public void testCreateAggregatorWithNullNameThrowsException() {
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("name cannot be null");
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    doFn.createAggregator(null, new Sum.SumLongFn());
-  }
-
-  @Test
-  public void testCreateAggregatorWithNullCombineFnThrowsException() {
-    CombineFn<Object, Object, Object> combiner = null;
-
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("combiner cannot be null");
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    doFn.createAggregator("testAggregator", combiner);
-  }
-
-  @Test
-  public void testCreateAggregatorWithNullSerializableFnThrowsException() {
-    SerializableFunction<Iterable<Object>, Object> combiner = null;
-
-    thrown.expect(NullPointerException.class);
-    thrown.expectMessage("combiner cannot be null");
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    doFn.createAggregator("testAggregator", combiner);
-  }
-
-  @Test
-  public void testCreateAggregatorWithSameNameThrowsException() {
-    String name = "testAggregator";
-    CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    doFn.createAggregator(name, combiner);
-
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Cannot create");
-    thrown.expectMessage(name);
-    thrown.expectMessage("already exists");
-
-    doFn.createAggregator(name, combiner);
-  }
-
-  @Test
-  public void testCreateAggregatorsWithDifferentNamesSucceeds() {
-    String nameOne = "testAggregator";
-    String nameTwo = "aggregatorPrime";
-    CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
-    DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
-    Aggregator<Double, Double> aggregatorOne =
-        doFn.createAggregator(nameOne, combiner);
-    Aggregator<Double, Double> aggregatorTwo =
-        doFn.createAggregator(nameTwo, combiner);
-
-    assertNotEquals(aggregatorOne, aggregatorTwo);
-  }
-
-  @Test
-  @Category(NeedsRunner.class)
-  public void testCreateAggregatorInStartBundleThrows() {
-    TestPipeline p = createTestPipeline(new DoFn<String, String>() {
-      @Override
-      public void startBundle(DoFn<String, String>.Context c) throws Exception {
-        createAggregator("anyAggregate", new MaxIntegerFn());
-      }
-
-      @Override
-      public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {}
-    });
-
-    thrown.expect(PipelineExecutionException.class);
-    thrown.expectCause(isA(IllegalStateException.class));
-
-    p.run();
-  }
-
-  @Test
-  @Category(NeedsRunner.class)
-  public void testCreateAggregatorInProcessElementThrows() {
-    TestPipeline p = createTestPipeline(new DoFn<String, String>() {
-      @Override
-      public void processElement(ProcessContext c) throws Exception {
-        createAggregator("anyAggregate", new MaxIntegerFn());
-      }
-    });
-
-    thrown.expect(PipelineExecutionException.class);
-    thrown.expectCause(isA(IllegalStateException.class));
-
-    p.run();
-  }
-
-  @Test
-  @Category(NeedsRunner.class)
-  public void testCreateAggregatorInFinishBundleThrows() {
-    TestPipeline p = createTestPipeline(new DoFn<String, String>() {
-      @Override
-      public void finishBundle(DoFn<String, String>.Context c) throws Exception {
-        createAggregator("anyAggregate", new MaxIntegerFn());
-      }
-
-      @Override
-      public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {}
-    });
-
-    thrown.expect(PipelineExecutionException.class);
-    thrown.expectCause(isA(IllegalStateException.class));
-
-    p.run();
-  }
-
-  /**
-   * Initialize a test pipeline with the specified {@link DoFn}.
-   */
-  private <InputT, OutputT> TestPipeline createTestPipeline(DoFn<InputT, OutputT> fn) {
-    TestPipeline pipeline = TestPipeline.create();
-    pipeline.apply(Create.of((InputT) null))
-     .apply(ParDo.of(fn));
-
-    return pipeline;
-  }
-
-  @Test
-  public void testPopulateDisplayDataDefaultBehavior() {
-    DoFn<String, String> usesDefault =
-        new DoFn<String, String>() {
-          @Override
-          public void processElement(ProcessContext c) throws Exception {}
-        };
-
-    DisplayData data = DisplayData.from(usesDefault);
-    assertThat(data.items(), empty());
-  }
-
-  @Test
-  @Category(NeedsRunner.class)
-  public void testAggregators() throws Exception {
-    Pipeline pipeline = TestPipeline.create();
-
-    CountOddsFn countOdds = new CountOddsFn();
-    pipeline
-        .apply(Create.of(1, 3, 5, 7, 2, 4, 6, 8, 10, 12, 14, 20, 42, 68, 100))
-        .apply(ParDo.of(countOdds));
-    PipelineResult result = pipeline.run();
-
-    AggregatorValues<Integer> values = result.getAggregatorValues(countOdds.aggregator);
-    assertThat(values.getValuesAtSteps(),
-        equalTo((Map<String, Integer>) ImmutableMap.<String, Integer>of("ParDo(CountOdds)", 4)));
-  }
-
-  private static class CountOddsFn extends DoFn<Integer, Void> {
-    @Override
-    public void processElement(ProcessContext c) throws Exception {
-      if (c.element() % 2 == 1) {
-        aggregator.addValue(1);
-      }
-    }
-
-    Aggregator<Integer, Integer> aggregator =
-        createAggregator("odds", new SumIntegerFn());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
index 8460a7c..e379f11 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
@@ -235,7 +235,7 @@ public class DoFnTesterTest {
     final PCollectionView<Integer> value =
         PCollectionViews.singletonView(
             TestPipeline.create(), WindowingStrategy.globalDefault(), true, 0, VarIntCoder.of());
-    DoFn<Integer, Integer> fn = new SideInputDoFn(value);
+    OldDoFn<Integer, Integer> fn = new SideInputDoFn(value);
 
     DoFnTester<Integer, Integer> tester = DoFnTester.of(fn);
 
@@ -251,7 +251,7 @@ public class DoFnTesterTest {
     final PCollectionView<Integer> value =
         PCollectionViews.singletonView(
             TestPipeline.create(), WindowingStrategy.globalDefault(), true, 0, VarIntCoder.of());
-    DoFn<Integer, Integer> fn = new SideInputDoFn(value);
+    OldDoFn<Integer, Integer> fn = new SideInputDoFn(value);
 
     DoFnTester<Integer, Integer> tester = DoFnTester.of(fn);
     tester.setSideInput(value, GlobalWindow.INSTANCE, -2);
@@ -264,7 +264,7 @@ public class DoFnTesterTest {
     assertThat(tester.peekOutputElements(), containsInAnyOrder(-2, -2, -2, -2));
   }
 
-  private static class SideInputDoFn extends DoFn<Integer, Integer> {
+  private static class SideInputDoFn extends OldDoFn<Integer, Integer> {
     private final PCollectionView<Integer> value;
 
     private SideInputDoFn(PCollectionView<Integer> value) {
@@ -278,9 +278,9 @@ public class DoFnTesterTest {
   }
 
   /**
-   * A DoFn that adds values to an aggregator and converts input to String in processElement.
+   * A OldDoFn that adds values to an aggregator and converts input to String in processElement.
    */
-  private static class CounterDoFn extends DoFn<Long, String> {
+  private static class CounterDoFn extends OldDoFn<Long, String> {
     Aggregator<Long, Long> agg = createAggregator("ctr", new Sum.SumLongFn());
     private final long startBundleVal;
     private final long finishBundleVal;



[42/51] [abbrv] incubator-beam git commit: Port easy transforms to new DoFn

Posted by ke...@apache.org.
Port easy transforms to new DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/47341e11
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/47341e11
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/47341e11

Branch: refs/heads/python-sdk
Commit: 47341e113334827101ddbf775c69ae34d178cd8f
Parents: 269fbf3
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 20:27:28 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/transforms/Count.java  |  4 ++--
 .../java/org/apache/beam/sdk/transforms/Create.java |  4 ++--
 .../apache/beam/sdk/transforms/FlatMapElements.java |  4 ++--
 .../org/apache/beam/sdk/transforms/Flatten.java     |  4 ++--
 .../java/org/apache/beam/sdk/transforms/Keys.java   |  4 ++--
 .../java/org/apache/beam/sdk/transforms/KvSwap.java |  4 ++--
 .../org/apache/beam/sdk/transforms/MapElements.java |  4 ++--
 .../org/apache/beam/sdk/transforms/Partition.java   |  4 ++--
 .../beam/sdk/transforms/RemoveDuplicates.java       |  4 ++--
 .../java/org/apache/beam/sdk/transforms/Sample.java |  6 +++---
 .../java/org/apache/beam/sdk/transforms/Values.java |  4 ++--
 .../java/org/apache/beam/sdk/transforms/View.java   |  8 ++++----
 .../org/apache/beam/sdk/transforms/WithKeys.java    |  4 ++--
 .../apache/beam/sdk/transforms/WithTimestamps.java  |  6 +++---
 .../beam/sdk/transforms/join/CoGroupByKey.java      | 16 ++++++++--------
 15 files changed, 40 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
index 7601ffc..ac59c76 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
@@ -107,8 +107,8 @@ public class Count {
     public PCollection<KV<T, Long>> apply(PCollection<T> input) {
       return
           input
-          .apply("Init", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
-            @Override
+          .apply("Init", ParDo.of(new DoFn<T, KV<T, Void>>() {
+            @ProcessElement
             public void processElement(ProcessContext c) {
               c.output(KV.of(c.element(), (Void) null));
             }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
index fb7f784..08d0a7a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
@@ -486,8 +486,8 @@ public class Create<T> {
       this.elementCoder = elementCoder;
     }
 
-    private static class ConvertTimestamps<T> extends OldDoFn<TimestampedValue<T>, T> {
-      @Override
+    private static class ConvertTimestamps<T> extends DoFn<TimestampedValue<T>, T> {
+      @ProcessElement
       public void processElement(ProcessContext c) {
         c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());
       }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
index b48da38..694592e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
@@ -133,9 +133,9 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
 
   @Override
   public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
+    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
       private static final long serialVersionUID = 0L;
-      @Override
+      @ProcessElement
       public void processElement(ProcessContext c) {
         for (OutputT element : fn.apply(c.element())) {
           c.output(element);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
index 53e898e..7e09d7e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
@@ -174,8 +174,8 @@ public class Flatten {
       Coder<T> elemCoder = ((IterableLikeCoder<T, ?>) inCoder).getElemCoder();
 
       return in.apply("FlattenIterables", ParDo.of(
-          new OldDoFn<Iterable<T>, T>() {
-            @Override
+          new DoFn<Iterable<T>, T>() {
+            @ProcessElement
             public void processElement(ProcessContext c) {
               for (T i : c.element()) {
                 c.output(i);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
index c8cbce8..5ac1866 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
@@ -58,8 +58,8 @@ public class Keys<K> extends PTransform<PCollection<? extends KV<K, ?>>,
   @Override
   public PCollection<K> apply(PCollection<? extends KV<K, ?>> in) {
     return
-        in.apply("Keys", ParDo.of(new OldDoFn<KV<K, ?>, K>() {
-          @Override
+        in.apply("Keys", ParDo.of(new DoFn<KV<K, ?>, K>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             c.output(c.element().getKey());
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
index 430d37b..d4386d2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
@@ -62,8 +62,8 @@ public class KvSwap<K, V> extends PTransform<PCollection<KV<K, V>>,
   @Override
   public PCollection<KV<V, K>> apply(PCollection<KV<K, V>> in) {
     return
-        in.apply("KvSwap", ParDo.of(new OldDoFn<KV<K, V>, KV<V, K>>() {
-          @Override
+        in.apply("KvSwap", ParDo.of(new DoFn<KV<K, V>, KV<V, K>>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             KV<K, V> e = c.element();
             c.output(KV.of(e.getValue(), e.getKey()));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
index c83c39f..b7b9a5f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
@@ -104,8 +104,8 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
 
   @Override
   public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
-      @Override
+    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
+      @ProcessElement
       public void processElement(ProcessContext c) {
         c.output(fn.apply(c.element()));
       }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
index 2ddcc29..05c9470 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
@@ -134,7 +134,7 @@ public class Partition<T> extends PTransform<PCollection<T>, PCollectionList<T>>
     this.partitionDoFn = partitionDoFn;
   }
 
-  private static class PartitionDoFn<X> extends OldDoFn<X, Void> {
+  private static class PartitionDoFn<X> extends DoFn<X, Void> {
     private final int numPartitions;
     private final PartitionFn<? super X> partitionFn;
     private final TupleTagList outputTags;
@@ -163,7 +163,7 @@ public class Partition<T> extends PTransform<PCollection<T>, PCollectionList<T>>
       return outputTags;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       X input = c.element();
       int partition = partitionFn.partitionFor(input, numPartitions);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
index d82c457..bba4b51 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
@@ -85,8 +85,8 @@ public class RemoveDuplicates<T> extends PTransform<PCollection<T>,
   @Override
   public PCollection<T> apply(PCollection<T> in) {
     return in
-        .apply("CreateIndex", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
-          @Override
+        .apply("CreateIndex", ParDo.of(new DoFn<T, KV<T, Void>>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             c.output(KV.of(c.element(), (Void) null));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
index 724b252..12ff2b9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
@@ -164,9 +164,9 @@ public class Sample {
   }
 
   /**
-   * A {@link OldDoFn} that returns up to limit elements from the side input PCollection.
+   * A {@link DoFn} that returns up to limit elements from the side input PCollection.
    */
-  private static class SampleAnyDoFn<T> extends OldDoFn<Void, T> {
+  private static class SampleAnyDoFn<T> extends DoFn<Void, T> {
     long limit;
     final PCollectionView<Iterable<T>> iterableView;
 
@@ -175,7 +175,7 @@ public class Sample {
       this.iterableView = iterableView;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       for (T i : c.sideInput(iterableView)) {
         if (limit-- <= 0) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
index 856e32a..34342db 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
@@ -58,8 +58,8 @@ public class Values<V> extends PTransform<PCollection<? extends KV<?, V>>,
   @Override
   public PCollection<V> apply(PCollection<? extends KV<?, V>> in) {
     return
-        in.apply("Values", ParDo.of(new OldDoFn<KV<?, V>, V>() {
-          @Override
+        in.apply("Values", ParDo.of(new DoFn<KV<?, V>, V>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             c.output(c.element().getValue());
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
index 8a61637..7a97c13 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
@@ -38,7 +38,7 @@ import java.util.Map;
  *
  * <p>When a {@link ParDo} tranform is processing a main input
  * element in a window {@code w} and a {@link PCollectionView} is read via
- * {@link OldDoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
+ * {@link DoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
  * returned.
  *
  * <p>The SDK supports viewing a {@link PCollection}, per window, as a single value,
@@ -118,7 +118,7 @@ import java.util.Map;
  *
  * PCollection PageVisits = urlVisits
  *     .apply(ParDo.withSideInputs(urlToPage)
- *         .of(new OldDoFn<UrlVisit, PageVisit>() {
+ *         .of(new DoFn<UrlVisit, PageVisit>() {
  *             {@literal @}Override
  *             void processElement(ProcessContext context) {
  *               UrlVisit urlVisit = context.element();
@@ -154,11 +154,11 @@ public class View {
    *
    * <p>If the input {@link PCollection} is empty,
    * throws {@link java.util.NoSuchElementException} in the consuming
-   * {@link OldDoFn}.
+   * {@link DoFn}.
    *
    * <p>If the input {@link PCollection} contains more than one
    * element, throws {@link IllegalArgumentException} in the
-   * consuming {@link OldDoFn}.
+   * consuming {@link DoFn}.
    */
   public static <T> AsSingleton<T> asSingleton() {
     return new AsSingleton<>();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
index 37d45aa..2a44963 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
@@ -113,8 +113,8 @@ public class WithKeys<K, V> extends PTransform<PCollection<V>,
   @Override
   public PCollection<KV<K, V>> apply(PCollection<V> in) {
     PCollection<KV<K, V>> result =
-        in.apply("AddKeys", ParDo.of(new OldDoFn<V, KV<K, V>>() {
-          @Override
+        in.apply("AddKeys", ParDo.of(new DoFn<V, KV<K, V>>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             c.output(KV.of(fn.apply(c.element()),
                 c.element()));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
index 41b549b..7b395f5 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
@@ -92,7 +92,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
    * Returns the allowed timestamp skew duration, which is the maximum
    * duration that timestamps can be shifted backwards from the timestamp of the input element.
    *
-   * @see OldDoFn#getAllowedTimestampSkew()
+   * @see DoFn#getAllowedTimestampSkew()
    */
   public Duration getAllowedTimestampSkew() {
     return allowedTimestampSkew;
@@ -105,7 +105,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
         .setTypeDescriptorInternal(input.getTypeDescriptor());
   }
 
-  private static class AddTimestampsDoFn<T> extends OldDoFn<T, T> {
+  private static class AddTimestampsDoFn<T> extends DoFn<T, T> {
     private final SerializableFunction<T, Instant> fn;
     private final Duration allowedTimestampSkew;
 
@@ -114,7 +114,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
       this.allowedTimestampSkew = allowedTimestampSkew;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       Instant timestamp = fn.apply(c.element());
       checkNotNull(

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/47341e11/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
index 1bd9f4a..cb06f95 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
@@ -19,9 +19,9 @@ package org.apache.beam.sdk.transforms.join;
 
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult.CoGbkResultCoder;
@@ -57,7 +57,7 @@ import java.util.List;
  *
  * PCollection<T> finalResultCollection =
  *   coGbkResultCollection.apply(ParDo.of(
- *     new OldDoFn<KV<K, CoGbkResult>, T>() {
+ *     new DoFn<KV<K, CoGbkResult>, T>() {
  *       @Override
  *       public void processElement(ProcessContext c) {
  *         KV<K, CoGbkResult> e = c.element();
@@ -167,12 +167,12 @@ public class CoGroupByKey<K> extends
   }
 
   /**
-   * A OldDoFn to construct a UnionTable (i.e., a
+   * A DoFn to construct a UnionTable (i.e., a
    * {@code PCollection<KV<K, RawUnionValue>>} from a
    * {@code PCollection<KV<K, V>>}.
    */
   private static class ConstructUnionTableFn<K, V> extends
-      OldDoFn<KV<K, V>, KV<K, RawUnionValue>> {
+      DoFn<KV<K, V>, KV<K, RawUnionValue>> {
 
     private final int index;
 
@@ -180,7 +180,7 @@ public class CoGroupByKey<K> extends
       this.index = index;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       KV<K, ?> e = c.element();
       c.output(KV.of(e.getKey(), new RawUnionValue(index, e.getValue())));
@@ -188,11 +188,11 @@ public class CoGroupByKey<K> extends
   }
 
   /**
-   * A OldDoFn to construct a CoGbkResult from an input grouped union
+   * A DoFn to construct a CoGbkResult from an input grouped union
    * table.
     */
   private static class ConstructCoGbkResultFn<K>
-    extends OldDoFn<KV<K, Iterable<RawUnionValue>>,
+    extends DoFn<KV<K, Iterable<RawUnionValue>>,
                      KV<K, CoGbkResult>> {
 
     private final CoGbkResultSchema schema;
@@ -201,7 +201,7 @@ public class CoGroupByKey<K> extends
       this.schema = schema;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       KV<K, Iterable<RawUnionValue>> e = c.element();
       c.output(KV.of(e.getKey(), new CoGbkResult(schema, e.getValue())));


[12/51] [abbrv] incubator-beam git commit: Port DebuggingWordCount example from OldDoFn to DoFn

Posted by ke...@apache.org.
Port DebuggingWordCount example from OldDoFn to DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/49d2f170
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/49d2f170
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/49d2f170

Branch: refs/heads/python-sdk
Commit: 49d2f1706f69c5106a9082ffd2fecaf69b2d868c
Parents: ca9e337
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:18 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/examples/DebuggingWordCount.java    | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/49d2f170/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
index 3c43152..c1b273c 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.values.KV;
@@ -106,8 +106,8 @@ import java.util.regex.Pattern;
  * overridden with {@code --inputFile}.
  */
 public class DebuggingWordCount {
-  /** A OldDoFn that filters for a specific key based upon a regular expression. */
-  public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
+  /** A DoFn that filters for a specific key based upon a regular expression. */
+  public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
     /**
      * Concept #1: The logger below uses the fully qualified class name of FilterTextFn
      * as the logger. All log statements emitted by this logger will be referenced by this name
@@ -133,7 +133,7 @@ public class DebuggingWordCount {
     private final Aggregator<Long, Long> unmatchedWords =
         createAggregator("umatchedWords", new Sum.SumLongFn());
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       if (filter.matcher(c.element().getKey()).matches()) {
         // Log at the "DEBUG" level each element that we match. When executing this pipeline


[05/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
index 5b9eeff..5e96c46 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -145,9 +145,9 @@ public class SerializationTest {
   }
 
   /**
-   * A DoFn that tokenizes lines of text into individual words.
+   * A OldDoFn that tokenizes lines of text into individual words.
    */
-  static class ExtractWordsFn extends DoFn<StringHolder, StringHolder> {
+  static class ExtractWordsFn extends OldDoFn<StringHolder, StringHolder> {
     private static final Pattern WORD_BOUNDARY = Pattern.compile("[^a-zA-Z']+");
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
@@ -173,9 +173,9 @@ public class SerializationTest {
   }
 
   /**
-   * A DoFn that converts a Word and Count into a printable string.
+   * A OldDoFn that converts a Word and Count into a printable string.
    */
-  private static class FormatCountsFn extends DoFn<KV<StringHolder, Long>, StringHolder> {
+  private static class FormatCountsFn extends OldDoFn<KV<StringHolder, Long>, StringHolder> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(new StringHolder(c.element().getKey() + ": " + c.element().getValue()));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
index 60b7f71..5775565 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.StringDelegateCoder;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import org.junit.After;
@@ -54,7 +54,7 @@ public class SideEffectsTest implements Serializable {
 
     pipeline.getCoderRegistry().registerCoder(URI.class, StringDelegateCoder.of(URI.class));
 
-    pipeline.apply(Create.of("a")).apply(ParDo.of(new DoFn<String, String>() {
+    pipeline.apply(Create.of("a")).apply(ParDo.of(new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         throw new UserException();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
index 904b448..c005f14 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -122,7 +122,7 @@ public class KafkaStreamingTest {
     EMBEDDED_ZOOKEEPER.shutdown();
   }
 
-  private static class FormatKVFn extends DoFn<KV<String, String>, String> {
+  private static class FormatKVFn extends OldDoFn<KV<String, String>, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().getKey() + "," + c.element().getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
index 873a591..da4db93 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.values.TypeDescriptor;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericDatumWriter;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
index 9db6650..c34ce66 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
@@ -20,7 +20,6 @@ package org.apache.beam.sdk.coders;
 import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Duration;
 import org.joda.time.ReadableDuration;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
index 693791c..d41bd1f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
@@ -22,7 +22,6 @@ import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
 import com.google.common.base.Converter;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Instant;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
index ecb1f0a..182fa1f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -709,7 +709,7 @@ public class PubsubIO {
        *
        * <p>Public so can be suppressed by runners.
        */
-      public class PubsubBoundedReader extends DoFn<Void, T> {
+      public class PubsubBoundedReader extends OldDoFn<Void, T> {
         private static final int DEFAULT_PULL_SIZE = 100;
         private static final int ACK_TIMEOUT_SEC = 60;
 
@@ -998,7 +998,7 @@ public class PubsubIO {
        *
        * <p>Public so can be suppressed by runners.
        */
-      public class PubsubBoundedWriter extends DoFn<T, Void> {
+      public class PubsubBoundedWriter extends OldDoFn<T, Void> {
         private static final int MAX_PUBLISH_BATCH_SIZE = 100;
         private transient List<OutgoingMessage> output;
         private transient PubsubClient pubsubClient;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
index 6f2b3ac..9e9536d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
@@ -31,8 +31,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -78,7 +78,7 @@ import javax.annotation.Nullable;
  * <li>We try to send messages in batches while also limiting send latency.
  * <li>No stats are logged. Rather some counters are used to keep track of elements and batches.
  * <li>Though some background threads are used by the underlying netty system all actual Pubsub
- * calls are blocking. We rely on the underlying runner to allow multiple {@link DoFn} instances
+ * calls are blocking. We rely on the underlying runner to allow multiple {@link OldDoFn} instances
  * to execute concurrently and hide latency.
  * <li>A failed bundle will cause messages to be resent. Thus we rely on the Pubsub consumer
  * to dedup messages.
@@ -155,7 +155,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
   /**
    * Convert elements to messages and shard them.
    */
-  private static class ShardFn<T> extends DoFn<T, KV<Integer, OutgoingMessage>> {
+  private static class ShardFn<T> extends OldDoFn<T, KV<Integer, OutgoingMessage>> {
     private final Aggregator<Long, Long> elementCounter =
         createAggregator("elements", new Sum.SumLongFn());
     private final Coder<T> elementCoder;
@@ -207,7 +207,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
    * Publish messages to Pubsub in batches.
    */
   private static class WriterFn
-      extends DoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
+      extends OldDoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
     private final PubsubClientFactory pubsubFactory;
     private final TopicPath topic;
     private final String timestampLabel;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
index 07d355e..d98bd6a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -1107,7 +1107,7 @@ public class PubsubUnboundedSource<T> extends PTransform<PBegin, PCollection<T>>
   // StatsFn
   // ================================================================================
 
-  private static class StatsFn<T> extends DoFn<T, T> {
+  private static class StatsFn<T> extends OldDoFn<T, T> {
     private final Aggregator<Long, Long> elementCounter =
         createAggregator("elements", new Sum.SumLongFn());
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
index b8902f9..de00035 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
@@ -19,9 +19,9 @@ package org.apache.beam.sdk.io;
 
 import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.coders.Coder;
-
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
+
 import org.joda.time.Instant;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
index 42d3c05..3e997b0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
@@ -27,8 +27,8 @@ import org.apache.beam.sdk.io.Sink.WriteOperation;
 import org.apache.beam.sdk.io.Sink.Writer;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -156,7 +156,7 @@ public class Write {
      * Writes all the elements in a bundle using a {@link Writer} produced by the
      * {@link WriteOperation} associated with the {@link Sink}.
      */
-    private class WriteBundles<WriteT> extends DoFn<T, WriteT> {
+    private class WriteBundles<WriteT> extends OldDoFn<T, WriteT> {
       // Writer that will write the records in this bundle. Lazily
       // initialized in processElement.
       private Writer<T, WriteT> writer = null;
@@ -182,7 +182,7 @@ public class Write {
           // Discard write result and close the write.
           try {
             writer.close();
-            // The writer does not need to be reset, as this DoFn cannot be reused.
+            // The writer does not need to be reset, as this OldDoFn cannot be reused.
           } catch (Exception closeException) {
             if (closeException instanceof InterruptedException) {
               // Do not silently ignore interrupted state.
@@ -217,7 +217,7 @@ public class Write {
      *
      * @see WriteBundles
      */
-    private class WriteShardedBundles<WriteT> extends DoFn<KV<Integer, Iterable<T>>, WriteT> {
+    private class WriteShardedBundles<WriteT> extends OldDoFn<KV<Integer, Iterable<T>>, WriteT> {
       private final PCollectionView<WriteOperation<T, WriteT>> writeOperationView;
 
       WriteShardedBundles(PCollectionView<WriteOperation<T, WriteT>> writeOperationView) {
@@ -296,10 +296,11 @@ public class Write {
      * <p>This singleton collection containing the WriteOperation is then used as a side input to a
      * ParDo over the PCollection of elements to write. In this bundle-writing phase,
      * {@link WriteOperation#createWriter} is called to obtain a {@link Writer}.
-     * {@link Writer#open} and {@link Writer#close} are called in {@link DoFn#startBundle} and
-     * {@link DoFn#finishBundle}, respectively, and {@link Writer#write} method is called for every
-     * element in the bundle. The output of this ParDo is a PCollection of <i>writer result</i>
-     * objects (see {@link Sink} for a description of writer results)-one for each bundle.
+     * {@link Writer#open} and {@link Writer#close} are called in {@link OldDoFn#startBundle} and
+     * {@link OldDoFn#finishBundle}, respectively, and {@link Writer#write} method is called for
+     * every element in the bundle. The output of this ParDo is a PCollection of
+     * <i>writer result</i> objects (see {@link Sink} for a description of writer results)-one for
+     * each bundle.
      *
      * <p>The final do-once ParDo uses the singleton collection of the WriteOperation as input and
      * the collection of writer results as a side-input. In this ParDo,
@@ -333,7 +334,7 @@ public class Write {
       // Initialize the resource in a do-once ParDo on the WriteOperation.
       operationCollection = operationCollection
           .apply("Initialize", ParDo.of(
-              new DoFn<WriteOperation<T, WriteT>, WriteOperation<T, WriteT>>() {
+              new OldDoFn<WriteOperation<T, WriteT>, WriteOperation<T, WriteT>>() {
             @Override
             public void processElement(ProcessContext c) throws Exception {
               WriteOperation<T, WriteT> writeOperation = c.element();
@@ -387,7 +388,7 @@ public class Write {
       // ParDo. There is a dependency between this ParDo and the parallel write (the writer results
       // collection as a side input), so it will happen after the parallel write.
       operationCollection
-          .apply("Finalize", ParDo.of(new DoFn<WriteOperation<T, WriteT>, Integer>() {
+          .apply("Finalize", ParDo.of(new OldDoFn<WriteOperation<T, WriteT>, Integer>() {
             @Override
             public void processElement(ProcessContext c) throws Exception {
               WriteOperation<T, WriteT> writeOperation = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
index e0a1ef3..b2df96e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
@@ -29,7 +29,6 @@ import com.google.common.base.Strings;
 import com.google.common.io.Files;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
index e89e5ad..aa9f13e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.options.GoogleApiDebugOptions.GoogleApiTracer;
 import org.apache.beam.sdk.options.ProxyInvocationHandler.Deserializer;
 import org.apache.beam.sdk.options.ProxyInvocationHandler.Serializer;
 import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.Context;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.Context;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 
 import com.google.auto.service.AutoService;
@@ -52,7 +52,7 @@ import javax.annotation.concurrent.ThreadSafe;
  * and {@link PipelineOptionsFactory#as(Class)}. They can be created
  * from command-line arguments with {@link PipelineOptionsFactory#fromArgs(String[])}.
  * They can be converted to another type by invoking {@link PipelineOptions#as(Class)} and
- * can be accessed from within a {@link DoFn} by invoking
+ * can be accessed from within a {@link OldDoFn} by invoking
  * {@link Context#getPipelineOptions()}.
  *
  * <p>For example:
@@ -151,7 +151,7 @@ import javax.annotation.concurrent.ThreadSafe;
  * {@link PipelineOptionsFactory#withValidation()} is invoked.
  *
  * <p>{@link JsonIgnore @JsonIgnore} is used to prevent a property from being serialized and
- * available during execution of {@link DoFn}. See the Serialization section below for more
+ * available during execution of {@link OldDoFn}. See the Serialization section below for more
  * details.
  *
  * <h2>Registration Of PipelineOptions</h2>

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
index f21b9b9..67fa2af 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
@@ -53,7 +53,6 @@ import com.google.common.collect.TreeMultimap;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
index 815de82..607bdda 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.options;
 
 import org.apache.beam.sdk.util.common.ReflectHelpers;
+
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Multimap;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
index a42ece2..6f6836e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
@@ -19,14 +19,14 @@ package org.apache.beam.sdk.runners;
 
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 
 import java.util.Collection;
 import java.util.Map;
 
 /**
  * A collection of values associated with an {@link Aggregator}. Aggregators declared in a
- * {@link DoFn} are emitted on a per-{@code DoFn}-application basis.
+ * {@link OldDoFn} are emitted on a per-{@code OldDoFn}-application basis.
  *
  * @param <T> the output type of the aggregator
  */

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
index a202ed4..80340c2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
@@ -33,11 +33,11 @@ import org.apache.beam.sdk.options.StreamingOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -762,7 +762,7 @@ public class PAssert {
           .apply("RewindowActuals", rewindowActuals.<T>windowActuals())
           .apply(
               ParDo.of(
-                  new DoFn<T, T>() {
+                  new OldDoFn<T, T>() {
                     @Override
                     public void processElement(ProcessContext context) throws CoderException {
                       context.output(CoderUtils.clone(coder, context.element()));
@@ -884,7 +884,7 @@ public class PAssert {
     }
   }
 
-  private static final class ConcatFn<T> extends DoFn<Iterable<Iterable<T>>, Iterable<T>> {
+  private static final class ConcatFn<T> extends OldDoFn<Iterable<Iterable<T>>, Iterable<T>> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       c.output(Iterables.concat(c.element()));
@@ -995,13 +995,13 @@ public class PAssert {
   }
 
   /**
-   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of a
+   * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of a
    * {@link PCollectionView}, and adjusts counters and thrown exceptions for use in testing.
    *
    * <p>The input is ignored, but is {@link Integer} to be usable on runners that do not support
    * null values.
    */
-  private static class SideInputCheckerDoFn<ActualT> extends DoFn<Integer, Void> {
+  private static class SideInputCheckerDoFn<ActualT> extends OldDoFn<Integer, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1030,13 +1030,13 @@ public class PAssert {
   }
 
   /**
-   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
+   * An {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
    * the single iterable element of the input {@link PCollection} and adjusts counters and
    * thrown exceptions for use in testing.
    *
    * <p>The singleton property is presumed, not enforced.
    */
-  private static class GroupedValuesCheckerDoFn<ActualT> extends DoFn<ActualT, Void> {
+  private static class GroupedValuesCheckerDoFn<ActualT> extends OldDoFn<ActualT, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1061,14 +1061,14 @@ public class PAssert {
   }
 
   /**
-   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
+   * An {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
    * the single item contained within the single iterable on input and
    * adjusts counters and thrown exceptions for use in testing.
    *
    * <p>The singleton property of the input {@link PCollection} is presumed, not enforced. However,
    * each input element must be a singleton iterable, or this will fail.
    */
-  private static class SingletonCheckerDoFn<ActualT> extends DoFn<Iterable<ActualT>, Void> {
+  private static class SingletonCheckerDoFn<ActualT> extends OldDoFn<Iterable<ActualT>, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1310,7 +1310,7 @@ public class PAssert {
   }
 
   /**
-   * A DoFn that filters elements based on their presence in a static collection of windows.
+   * An OldDoFn that filters elements based on their presence in a static collection of windows.
    */
   private static final class FilterWindows<T> extends PTransform<PCollection<T>, PCollection<T>> {
     private final StaticWindows windows;
@@ -1324,7 +1324,7 @@ public class PAssert {
       return input.apply("FilterWindows", ParDo.of(new Fn()));
     }
 
-    private class Fn extends DoFn<T, T> implements RequiresWindowAccess {
+    private class Fn extends OldDoFn<T, T> implements RequiresWindowAccess {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         if (windows.getWindows().contains(c.window())) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
index 45b0592..4e0c0be 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
@@ -35,7 +35,6 @@ import java.io.Serializable;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
-
 import javax.annotation.Nullable;
 
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
index 0de3024..98cdeba 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
@@ -37,7 +37,6 @@ import com.fasterxml.jackson.core.TreeNode;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-
 import org.junit.experimental.categories.Category;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
index ff553ba..c4596c1 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
@@ -21,6 +21,7 @@ import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.DefaultValueFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
+
 import org.hamcrest.BaseMatcher;
 import org.hamcrest.Description;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
index c8aad78..db4ab33 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
@@ -24,8 +24,9 @@ import org.apache.beam.sdk.util.ExecutionContext;
  * An {@code Aggregator<InputT>} enables monitoring of values of type {@code InputT},
  * to be combined across all bundles.
  *
- * <p>Aggregators are created by calling {@link DoFn#createAggregator DoFn.createAggregatorForDoFn},
- * typically from the {@link DoFn} constructor. Elements can be added to the
+ * <p>Aggregators are created by calling
+ * {@link OldDoFn#createAggregator OldDoFn.createAggregatorForDoFn},
+ * typically from the {@link OldDoFn} constructor. Elements can be added to the
  * {@code Aggregator} by calling {@link Aggregator#addValue}.
  *
  * <p>Aggregators are visible in the monitoring UI, when the pipeline is run
@@ -36,7 +37,7 @@ import org.apache.beam.sdk.util.ExecutionContext;
  *
  * <p>Example:
  * <pre> {@code
- * class MyDoFn extends DoFn<String, String> {
+ * class MyDoFn extends OldDoFn<String, String> {
  *   private Aggregator<Integer, Integer> myAggregator;
  *
  *   public MyDoFn() {
@@ -78,8 +79,9 @@ public interface Aggregator<InputT, OutputT> {
     /**
      * Create an aggregator with the given {@code name} and {@link CombineFn}.
      *
-     *  <p>This method is called to create an aggregator for a {@link DoFn}. It receives the class
-     *  of the {@link DoFn} being executed and the context of the step it is being executed in.
+     *  <p>This method is called to create an aggregator for an {@link OldDoFn}. It receives the
+     *  class of the {@link OldDoFn} being executed and the context of the step it is being
+     *  executed in.
      */
     <InputT, AccumT, OutputT> Aggregator<InputT, OutputT> createAggregatorForDoFn(
         Class<?> fnClass, ExecutionContext.StepContext stepContext,
@@ -88,7 +90,7 @@ public interface Aggregator<InputT, OutputT> {
 
   // TODO: Consider the following additional API conveniences:
   // - In addition to createAggregatorForDoFn(), consider adding getAggregator() to
-  //   avoid the need to store the aggregator locally in a DoFn, i.e., create
+  //   avoid the need to store the aggregator locally in an OldDoFn, i.e., create
   //   if not already present.
   // - Add a shortcut for the most common aggregator:
   //   c.createAggregatorForDoFn("name", new Sum.SumIntegerFn()).

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
index 97961e9..abed843 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.transforms;
 import java.util.Collection;
 
 /**
- * An internal class for extracting {@link Aggregator Aggregators} from {@link DoFn DoFns}.
+ * An internal class for extracting {@link Aggregator Aggregators} from {@link OldDoFn OldDoFns}.
  */
 public final class AggregatorRetriever {
   private AggregatorRetriever() {
@@ -28,9 +28,9 @@ public final class AggregatorRetriever {
   }
 
   /**
-   * Returns the {@link Aggregator Aggregators} created by the provided {@link DoFn}.
+   * Returns the {@link Aggregator Aggregators} created by the provided {@link OldDoFn}.
    */
-  public static Collection<Aggregator<?, ?>> getAggregators(DoFn<?, ?> fn) {
+  public static Collection<Aggregator<?, ?>> getAggregators(OldDoFn<?, ?> fn) {
     return fn.getAggregators();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
index 96c03eb..6fc2324 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
@@ -1473,9 +1473,9 @@ public class Combine {
       PCollection<OutputT> defaultIfEmpty = maybeEmpty.getPipeline()
           .apply("CreateVoid", Create.of((Void) null).withCoder(VoidCoder.of()))
           .apply("ProduceDefault", ParDo.withSideInputs(maybeEmptyView).of(
-              new DoFn<Void, OutputT>() {
+              new OldDoFn<Void, OutputT>() {
                 @Override
-                public void processElement(DoFn<Void, OutputT>.ProcessContext c) {
+                public void processElement(OldDoFn<Void, OutputT>.ProcessContext c) {
                   Iterator<OutputT> combined = c.sideInput(maybeEmptyView).iterator();
                   if (!combined.hasNext()) {
                     c.output(defaultValue);
@@ -2097,7 +2097,7 @@ public class Combine {
       final TupleTag<KV<KV<K, Integer>, InputT>> hot = new TupleTag<>();
       final TupleTag<KV<K, InputT>> cold = new TupleTag<>();
       PCollectionTuple split = input.apply("AddNonce", ParDo.of(
-          new DoFn<KV<K, InputT>, KV<K, InputT>>() {
+          new OldDoFn<KV<K, InputT>, KV<K, InputT>>() {
             transient int counter;
             @Override
             public void startBundle(Context c) {
@@ -2135,8 +2135,8 @@ public class Combine {
           .setWindowingStrategyInternal(preCombineStrategy)
           .apply("PreCombineHot", Combine.perKey(hotPreCombine))
           .apply("StripNonce", ParDo.of(
-              new DoFn<KV<KV<K, Integer>, AccumT>,
-                       KV<K, InputOrAccum<InputT, AccumT>>>() {
+              new OldDoFn<KV<KV<K, Integer>, AccumT>,
+                                     KV<K, InputOrAccum<InputT, AccumT>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   c.output(KV.of(
@@ -2151,7 +2151,7 @@ public class Combine {
           .get(cold)
           .setCoder(inputCoder)
           .apply("PrepareCold", ParDo.of(
-              new DoFn<KV<K, InputT>, KV<K, InputOrAccum<InputT, AccumT>>>() {
+              new OldDoFn<KV<K, InputT>, KV<K, InputOrAccum<InputT, AccumT>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   c.output(KV.of(c.element().getKey(),
@@ -2359,7 +2359,7 @@ public class Combine {
       final PerKeyCombineFnRunner<? super K, ? super InputT, ?, OutputT> combineFnRunner =
           PerKeyCombineFnRunners.create(fn);
       PCollection<KV<K, OutputT>> output = input.apply(ParDo.of(
-          new DoFn<KV<K, ? extends Iterable<InputT>>, KV<K, OutputT>>() {
+          new OldDoFn<KV<K, ? extends Iterable<InputT>>, KV<K, OutputT>>() {
             @Override
             public void processElement(ProcessContext c) {
               K key = c.element().getKey();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
index f2ed5e1..777deba 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
@@ -90,7 +90,7 @@ public class CombineFns {
    *
    * PCollection<T> finalResultCollection = maxAndMean
    *     .apply(ParDo.of(
-   *         new DoFn<KV<K, CoCombineResult>, T>() {
+   *         new OldDoFn<KV<K, CoCombineResult>, T>() {
    *           @Override
    *           public void processElement(ProcessContext c) throws Exception {
    *             KV<K, CoCombineResult> e = c.element();
@@ -133,7 +133,7 @@ public class CombineFns {
    *
    * PCollection<T> finalResultCollection = maxAndMean
    *     .apply(ParDo.of(
-   *         new DoFn<CoCombineResult, T>() {
+   *         new OldDoFn<CoCombineResult, T>() {
    *           @Override
    *           public void processElement(ProcessContext c) throws Exception {
    *             CoCombineResult e = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
index 3a0fb5d..7601ffc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
@@ -107,7 +107,7 @@ public class Count {
     public PCollection<KV<T, Long>> apply(PCollection<T> input) {
       return
           input
-          .apply("Init", ParDo.of(new DoFn<T, KV<T, Void>>() {
+          .apply("Init", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
             @Override
             public void processElement(ProcessContext c) {
               c.output(KV.of(c.element(), (Void) null));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
index fa645ab..fb7f784 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
@@ -486,7 +486,7 @@ public class Create<T> {
       this.elementCoder = elementCoder;
     }
 
-    private static class ConvertTimestamps<T> extends DoFn<TimestampedValue<T>, T> {
+    private static class ConvertTimestamps<T> extends OldDoFn<TimestampedValue<T>, T> {
       @Override
       public void processElement(ProcessContext c) {
         c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
deleted file mode 100644
index 6d5d1ed..0000000
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ /dev/null
@@ -1,565 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.annotations.Experimental.Kind;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-import org.apache.beam.sdk.transforms.display.HasDisplayData;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowingInternals;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.TypeDescriptor;
-
-import com.google.common.base.MoreObjects;
-
-import org.joda.time.Duration;
-import org.joda.time.Instant;
-
-import java.io.Serializable;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.UUID;
-
-/**
- * The argument to {@link ParDo} providing the code to use to process
- * elements of the input
- * {@link org.apache.beam.sdk.values.PCollection}.
- *
- * <p>See {@link ParDo} for more explanation, examples of use, and
- * discussion of constraints on {@code DoFn}s, including their
- * serializability, lack of access to global shared mutable state,
- * requirements for failure tolerance, and benefits of optimization.
- *
- * <p>{@code DoFn}s can be tested in the context of a particular
- * {@code Pipeline} by running that {@code Pipeline} on sample input
- * and then checking its output.  Unit testing of a {@code DoFn},
- * separately from any {@code ParDo} transform or {@code Pipeline},
- * can be done via the {@link DoFnTester} harness.
- *
- * <p>{@link DoFnWithContext} (currently experimental) offers an alternative
- * mechanism for accessing {@link ProcessContext#window()} without the need
- * to implement {@link RequiresWindowAccess}.
- *
- * <p>See also {@link #processElement} for details on implementing the transformation
- * from {@code InputT} to {@code OutputT}.
- *
- * @param <InputT> the type of the (main) input elements
- * @param <OutputT> the type of the (main) output elements
- */
-public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
-
-  /**
-   * Information accessible to all methods in this {@code DoFn}.
-   * Used primarily to output elements.
-   */
-  public abstract class Context {
-
-    /**
-     * Returns the {@code PipelineOptions} specified with the
-     * {@link org.apache.beam.sdk.runners.PipelineRunner}
-     * invoking this {@code DoFn}.  The {@code PipelineOptions} will
-     * be the default running via {@link DoFnTester}.
-     */
-    public abstract PipelineOptions getPipelineOptions();
-
-    /**
-     * Adds the given element to the main output {@code PCollection}.
-     *
-     * <p>Once passed to {@code output} the element should be considered
-     * immutable and not be modified in any way. It may be cached or retained
-     * by the Dataflow runtime or later steps in the pipeline, or used in
-     * other unspecified ways.
-     *
-     * <p>If invoked from {@link DoFn#processElement processElement}, the output
-     * element will have the same timestamp and be in the same windows
-     * as the input element passed to {@link DoFn#processElement processElement}.
-     *
-     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
-     * this will attempt to use the
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
-     * of the input {@code PCollection} to determine what windows the element
-     * should be in, throwing an exception if the {@code WindowFn} attempts
-     * to access any information about the input element. The output element
-     * will have a timestamp of negative infinity.
-     */
-    public abstract void output(OutputT output);
-
-    /**
-     * Adds the given element to the main output {@code PCollection},
-     * with the given timestamp.
-     *
-     * <p>Once passed to {@code outputWithTimestamp} the element should not be
-     * modified in any way.
-     *
-     * <p>If invoked from {@link DoFn#processElement processElement}, the timestamp
-     * must not be older than the input element's timestamp minus
-     * {@link DoFn#getAllowedTimestampSkew getAllowedTimestampSkew}.  The output element will
-     * be in the same windows as the input element.
-     *
-     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
-     * this will attempt to use the
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
-     * of the input {@code PCollection} to determine what windows the element
-     * should be in, throwing an exception if the {@code WindowFn} attempts
-     * to access any information about the input element except for the
-     * timestamp.
-     */
-    public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
-
-    /**
-     * Adds the given element to the side output {@code PCollection} with the
-     * given tag.
-     *
-     * <p>Once passed to {@code sideOutput} the element should not be modified
-     * in any way.
-     *
-     * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags withOutputTags} to
-     * specify the tags of side outputs that it consumes. Non-consumed side
-     * outputs, e.g., outputs for monitoring purposes only, don't necessarily
-     * need to be specified.
-     *
-     * <p>The output element will have the same timestamp and be in the same
-     * windows as the input element passed to {@link DoFn#processElement processElement}.
-     *
-     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
-     * this will attempt to use the
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
-     * of the input {@code PCollection} to determine what windows the element
-     * should be in, throwing an exception if the {@code WindowFn} attempts
-     * to access any information about the input element. The output element
-     * will have a timestamp of negative infinity.
-     *
-     * @see ParDo#withOutputTags
-     */
-    public abstract <T> void sideOutput(TupleTag<T> tag, T output);
-
-    /**
-     * Adds the given element to the specified side output {@code PCollection},
-     * with the given timestamp.
-     *
-     * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
-     * modified in any way.
-     *
-     * <p>If invoked from {@link DoFn#processElement processElement}, the timestamp
-     * must not be older than the input element's timestamp minus
-     * {@link DoFn#getAllowedTimestampSkew getAllowedTimestampSkew}.  The output element will
-     * be in the same windows as the input element.
-     *
-     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
-     * this will attempt to use the
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
-     * of the input {@code PCollection} to determine what windows the element
-     * should be in, throwing an exception if the {@code WindowFn} attempts
-     * to access any information about the input element except for the
-     * timestamp.
-     *
-     * @see ParDo#withOutputTags
-     */
-    public abstract <T> void sideOutputWithTimestamp(
-        TupleTag<T> tag, T output, Instant timestamp);
-
-    /**
-     * Creates an {@link Aggregator} in the {@link DoFn} context with the
-     * specified name and aggregation logic specified by {@link CombineFn}.
-     *
-     * <p>For internal use only.
-     *
-     * @param name the name of the aggregator
-     * @param combiner the {@link CombineFn} to use in the aggregator
-     * @return an aggregator for the provided name and {@link CombineFn} in this
-     *         context
-     */
-    @Experimental(Kind.AGGREGATOR)
-    protected abstract <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
-        createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner);
-
-    /**
-     * Sets up {@link Aggregator}s created by the {@link DoFn} so they are
-     * usable within this context.
-     *
-     * <p>This method should be called by runners before {@link DoFn#startBundle}
-     * is executed.
-     */
-    @Experimental(Kind.AGGREGATOR)
-    protected final void setupDelegateAggregators() {
-      for (DelegatingAggregator<?, ?> aggregator : aggregators.values()) {
-        setupDelegateAggregator(aggregator);
-      }
-
-      aggregatorsAreFinal = true;
-    }
-
-    private final <AggInputT, AggOutputT> void setupDelegateAggregator(
-        DelegatingAggregator<AggInputT, AggOutputT> aggregator) {
-
-      Aggregator<AggInputT, AggOutputT> delegate = createAggregatorInternal(
-          aggregator.getName(), aggregator.getCombineFn());
-
-      aggregator.setDelegate(delegate);
-    }
-  }
-
-  /**
-   * Information accessible when running {@link DoFn#processElement}.
-   */
-  public abstract class ProcessContext extends Context {
-
-    /**
-     * Returns the input element to be processed.
-     *
-     * <p>The element should be considered immutable. The Dataflow runtime will not mutate the
-     * element, so it is safe to cache, etc. The element should not be mutated by any of the
-     * {@link DoFn} methods, because it may be cached elsewhere, retained by the Dataflow runtime,
-     * or used in other unspecified ways.
-     */
-    public abstract InputT element();
-
-    /**
-     * Returns the value of the side input for the window corresponding to the
-     * window of the main input element.
-     *
-     * <p>See
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn#getSideInputWindow}
-     * for how this corresponding window is determined.
-     *
-     * @throws IllegalArgumentException if this is not a side input
-     * @see ParDo#withSideInputs
-     */
-    public abstract <T> T sideInput(PCollectionView<T> view);
-
-    /**
-     * Returns the timestamp of the input element.
-     *
-     * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
-     * for more information.
-     */
-    public abstract Instant timestamp();
-
-    /**
-     * Returns the window into which the input element has been assigned.
-     *
-     * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
-     * for more information.
-     *
-     * @throws UnsupportedOperationException if this {@link DoFn} does
-     * not implement {@link RequiresWindowAccess}.
-     */
-    public abstract BoundedWindow window();
-
-    /**
-     * Returns information about the pane within this window into which the
-     * input element has been assigned.
-     *
-     * <p>Generally all data is in a single, uninteresting pane unless custom
-     * triggering and/or late data has been explicitly requested.
-     * See {@link org.apache.beam.sdk.transforms.windowing.Window}
-     * for more information.
-     */
-    public abstract PaneInfo pane();
-
-    /**
-     * Returns the process context to use for implementing windowing.
-     */
-    @Experimental
-    public abstract WindowingInternals<InputT, OutputT> windowingInternals();
-  }
-
-  /**
-   * Returns the allowed timestamp skew duration, which is the maximum
-   * duration that timestamps can be shifted backward in
-   * {@link DoFn.Context#outputWithTimestamp}.
-   *
-   * <p>The default value is {@code Duration.ZERO}, in which case
-   * timestamps can only be shifted forward to future.  For infinite
-   * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
-   *
-   * <p> Note that producing an element whose timestamp is less than the
-   * current timestamp may result in late data, i.e. returning a non-zero
-   * value here does not impact watermark calculations used for firing
-   * windows.
-   *
-   * @deprecated does not interact well with the watermark.
-   */
-  @Deprecated
-  public Duration getAllowedTimestampSkew() {
-    return Duration.ZERO;
-  }
-
-  /**
-   * Interface for signaling that a {@link DoFn} needs to access the window the
-   * element is being processed in, via {@link DoFn.ProcessContext#window}.
-   */
-  @Experimental
-  public interface RequiresWindowAccess {}
-
-  public DoFn() {
-    this(new HashMap<String, DelegatingAggregator<?, ?>>());
-  }
-
-  DoFn(Map<String, DelegatingAggregator<?, ?>> aggregators) {
-    this.aggregators = aggregators;
-  }
-
-  /////////////////////////////////////////////////////////////////////////////
-
-  private final Map<String, DelegatingAggregator<?, ?>> aggregators;
-
-  /**
-   * Protects aggregators from being created after initialization.
-   */
-  private boolean aggregatorsAreFinal;
-
-  /**
-   * Prepares this {@code DoFn} instance for processing a batch of elements.
-   *
-   * <p>By default, does nothing.
-   */
-  public void startBundle(Context c) throws Exception {
-  }
-
-  /**
-   * Processes one input element.
-   *
-   * <p>The current element of the input {@code PCollection} is returned by
-   * {@link ProcessContext#element() c.element()}. It should be considered immutable. The Dataflow
-   * runtime will not mutate the element, so it is safe to cache, etc. The element should not be
-   * mutated by any of the {@link DoFn} methods, because it may be cached elsewhere, retained by the
-   * Dataflow runtime, or used in other unspecified ways.
-   *
-   * <p>A value is added to the main output {@code PCollection} by {@link ProcessContext#output}.
-   * Once passed to {@code output} the element should be considered immutable and not be modified in
-   * any way. It may be cached elsewhere, retained by the Dataflow runtime, or used in other
-   * unspecified ways.
-   *
-   * @see ProcessContext
-   */
-  public abstract void processElement(ProcessContext c) throws Exception;
-
-  /**
-   * Finishes processing this batch of elements.
-   *
-   * <p>By default, does nothing.
-   */
-  public void finishBundle(Context c) throws Exception {
-  }
-
-  /**
-   * {@inheritDoc}
-   *
-   * <p>By default, does not register any display data. Implementors may override this method
-   * to provide their own display data.
-   */
-  @Override
-  public void populateDisplayData(DisplayData.Builder builder) {
-  }
-
-  /////////////////////////////////////////////////////////////////////////////
-
-  /**
-   * Returns a {@link TypeDescriptor} capturing what is known statically
-   * about the input type of this {@code DoFn} instance's most-derived
-   * class.
-   *
-   * <p>See {@link #getOutputTypeDescriptor} for more discussion.
-   */
-  protected TypeDescriptor<InputT> getInputTypeDescriptor() {
-    return new TypeDescriptor<InputT>(getClass()) {};
-  }
-
-  /**
-   * Returns a {@link TypeDescriptor} capturing what is known statically
-   * about the output type of this {@code DoFn} instance's
-   * most-derived class.
-   *
-   * <p>In the normal case of a concrete {@code DoFn} subclass with
-   * no generic type parameters of its own (including anonymous inner
-   * classes), this will be a complete non-generic type, which is good
-   * for choosing a default output {@code Coder<OutputT>} for the output
-   * {@code PCollection<OutputT>}.
-   */
-  protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
-    return new TypeDescriptor<OutputT>(getClass()) {};
-  }
-
-  /**
-   * Returns an {@link Aggregator} with aggregation logic specified by the
-   * {@link CombineFn} argument. The name provided must be unique across
-   * {@link Aggregator}s created within the DoFn. Aggregators can only be created
-   * during pipeline construction.
-   *
-   * @param name the name of the aggregator
-   * @param combiner the {@link CombineFn} to use in the aggregator
-   * @return an aggregator for the provided name and combiner in the scope of
-   *         this DoFn
-   * @throws NullPointerException if the name or combiner is null
-   * @throws IllegalArgumentException if the given name collides with another
-   *         aggregator in this scope
-   * @throws IllegalStateException if called during pipeline processing.
-   */
-  protected final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
-      createAggregator(String name, CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
-    checkNotNull(name, "name cannot be null");
-    checkNotNull(combiner, "combiner cannot be null");
-    checkArgument(!aggregators.containsKey(name),
-        "Cannot create aggregator with name %s."
-        + " An Aggregator with that name already exists within this scope.",
-        name);
-
-    checkState(!aggregatorsAreFinal, "Cannot create an aggregator during DoFn processing."
-        + " Aggregators should be registered during pipeline construction.");
-
-    DelegatingAggregator<AggInputT, AggOutputT> aggregator =
-        new DelegatingAggregator<>(name, combiner);
-    aggregators.put(name, aggregator);
-    return aggregator;
-  }
-
-  /**
-   * Returns an {@link Aggregator} with the aggregation logic specified by the
-   * {@link SerializableFunction} argument. The name provided must be unique
-   * across {@link Aggregator}s created within the DoFn. Aggregators can only be
-   * created during pipeline construction.
-   *
-   * @param name the name of the aggregator
-   * @param combiner the {@link SerializableFunction} to use in the aggregator
-   * @return an aggregator for the provided name and combiner in the scope of
-   *         this DoFn
-   * @throws NullPointerException if the name or combiner is null
-   * @throws IllegalArgumentException if the given name collides with another
-   *         aggregator in this scope
-   * @throws IllegalStateException if called during pipeline processing.
-   */
-  protected final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(String name,
-      SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
-    checkNotNull(combiner, "combiner cannot be null.");
-    return createAggregator(name, Combine.IterableCombineFn.of(combiner));
-  }
-
-  /**
-   * Returns the {@link Aggregator Aggregators} created by this {@code DoFn}.
-   */
-  Collection<Aggregator<?, ?>> getAggregators() {
-    return Collections.<Aggregator<?, ?>>unmodifiableCollection(aggregators.values());
-  }
-
-  /**
-   * An {@link Aggregator} that delegates calls to addValue to another
-   * aggregator.
-   *
-   * @param <AggInputT> the type of input element
-   * @param <AggOutputT> the type of output element
-   */
-  static class DelegatingAggregator<AggInputT, AggOutputT> implements
-      Aggregator<AggInputT, AggOutputT>, Serializable {
-    private final UUID id;
-
-    private final String name;
-
-    private final CombineFn<AggInputT, ?, AggOutputT> combineFn;
-
-    private Aggregator<AggInputT, ?> delegate;
-
-    public DelegatingAggregator(String name,
-        CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
-      this.id = UUID.randomUUID();
-      this.name = checkNotNull(name, "name cannot be null");
-      // Safe contravariant cast
-      @SuppressWarnings("unchecked")
-      CombineFn<AggInputT, ?, AggOutputT> specificCombiner =
-          (CombineFn<AggInputT, ?, AggOutputT>) checkNotNull(combiner, "combineFn cannot be null");
-      this.combineFn = specificCombiner;
-    }
-
-    @Override
-    public void addValue(AggInputT value) {
-      if (delegate == null) {
-        throw new IllegalStateException(
-            "addValue cannot be called on Aggregator outside of the execution of a DoFn.");
-      } else {
-        delegate.addValue(value);
-      }
-    }
-
-    @Override
-    public String getName() {
-      return name;
-    }
-
-    @Override
-    public CombineFn<AggInputT, ?, AggOutputT> getCombineFn() {
-      return combineFn;
-    }
-
-    /**
-     * Sets the current delegate of the Aggregator.
-     *
-     * @param delegate the delegate to set in this aggregator
-     */
-    public void setDelegate(Aggregator<AggInputT, ?> delegate) {
-      this.delegate = delegate;
-    }
-
-    @Override
-    public String toString() {
-      return MoreObjects.toStringHelper(getClass())
-          .add("name", name)
-          .add("combineFn", combineFn)
-          .toString();
-    }
-
-    @Override
-    public int hashCode() {
-      return Objects.hash(id, name, combineFn.getClass());
-    }
-
-    /**
-     * Indicates whether some other object is "equal to" this one.
-     *
-     * <p>{@code DelegatingAggregator} instances are equal if they have the same name, their
-     * CombineFns are the same class, and they have identical IDs.
-     */
-    @Override
-    public boolean equals(Object o) {
-      if (o == this) {
-        return true;
-      }
-      if (o == null) {
-        return false;
-      }
-      if (o instanceof DelegatingAggregator) {
-        DelegatingAggregator<?, ?> that = (DelegatingAggregator<?, ?>) o;
-        return Objects.equals(this.id, that.id)
-            && Objects.equals(this.name, that.name)
-            && Objects.equals(this.combineFn.getClass(), that.combineFn.getClass());
-      }
-      return false;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
index 0616eff..d8d4181 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
@@ -194,7 +194,7 @@ public abstract class DoFnReflector {
    */
   public abstract boolean usesSingleWindow();
 
-  /** Create an {@link DoFnInvoker} bound to the given {@link DoFn}. */
+  /** Create an {@link DoFnInvoker} bound to the given {@link OldDoFn}. */
   public abstract <InputT, OutputT> DoFnInvoker<InputT, OutputT> bindInvoker(
       DoFnWithContext<InputT, OutputT> fn);
 
@@ -217,9 +217,9 @@ public abstract class DoFnReflector {
   }
 
   /**
-   * Create a {@link DoFn} that the {@link DoFnWithContext}.
+   * Create a {@link OldDoFn} that the {@link DoFnWithContext}.
    */
-  public <InputT, OutputT> DoFn<InputT, OutputT> toDoFn(DoFnWithContext<InputT, OutputT> fn) {
+  public <InputT, OutputT> OldDoFn<InputT, OutputT> toDoFn(DoFnWithContext<InputT, OutputT> fn) {
     if (usesSingleWindow()) {
       return new WindowDoFnAdapter<InputT, OutputT>(this, fn);
     } else {
@@ -287,7 +287,7 @@ public abstract class DoFnReflector {
    * <li>Any generics on the extra context arguments match what is expected. Eg.,
    *     {@code WindowingInternals<InputT, OutputT>} either matches the
    *     {@code InputT} and {@code OutputT} parameters of the
-   *     {@code DoFn<InputT, OutputT>.ProcessContext}, or it uses a wildcard, etc.
+   *     {@code OldDoFn<InputT, OutputT>.ProcessContext}, or it uses a wildcard, etc.
    * </ol>
    *
    * @param m the method to verify
@@ -328,7 +328,7 @@ public abstract class DoFnReflector {
     AdditionalParameter[] contextInfos = new AdditionalParameter[params.length - 1];
 
     // Fill in the generics in the allExtraContextArgs interface from the types in the
-    // Context or ProcessContext DoFn.
+    // Context or ProcessContext OldDoFn.
     ParameterizedType pt = (ParameterizedType) contextToken.getType();
     // We actually want the owner, since ProcessContext and Context are owned by DoFnWithContext.
     pt = (ParameterizedType) pt.getOwnerType();
@@ -364,18 +364,18 @@ public abstract class DoFnReflector {
     return ImmutableList.copyOf(contextInfos);
   }
 
-  /** Interface for invoking the {@code DoFn} processing methods. */
+  /** Interface for invoking the {@code OldDoFn} processing methods. */
   public interface DoFnInvoker<InputT, OutputT>  {
-    /** Invoke {@link DoFn#startBundle} on the bound {@code DoFn}. */
+    /** Invoke {@link OldDoFn#startBundle} on the bound {@code OldDoFn}. */
     void invokeStartBundle(
         DoFnWithContext<InputT, OutputT>.Context c,
         ExtraContextFactory<InputT, OutputT> extra);
-    /** Invoke {@link DoFn#finishBundle} on the bound {@code DoFn}. */
+    /** Invoke {@link OldDoFn#finishBundle} on the bound {@code OldDoFn}. */
     void invokeFinishBundle(
         DoFnWithContext<InputT, OutputT>.Context c,
         ExtraContextFactory<InputT, OutputT> extra);
 
-    /** Invoke {@link DoFn#processElement} on the bound {@code DoFn}. */
+    /** Invoke {@link OldDoFn#processElement} on the bound {@code OldDoFn}. */
     public void invokeProcessElement(
         DoFnWithContext<InputT, OutputT>.ProcessContext c,
         ExtraContextFactory<InputT, OutputT> extra);
@@ -565,10 +565,10 @@ public abstract class DoFnReflector {
       extends DoFnWithContext<InputT, OutputT>.Context
       implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
 
-    private DoFn<InputT, OutputT>.Context context;
+    private OldDoFn<InputT, OutputT>.Context context;
 
     private ContextAdapter(
-        DoFnWithContext<InputT, OutputT> fn, DoFn<InputT, OutputT>.Context context) {
+        DoFnWithContext<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.Context context) {
       fn.super();
       this.context = context;
     }
@@ -618,11 +618,11 @@ public abstract class DoFnReflector {
       extends DoFnWithContext<InputT, OutputT>.ProcessContext
       implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
 
-    private DoFn<InputT, OutputT>.ProcessContext context;
+    private OldDoFn<InputT, OutputT>.ProcessContext context;
 
     private ProcessContextAdapter(
         DoFnWithContext<InputT, OutputT> fn,
-        DoFn<InputT, OutputT>.ProcessContext context) {
+        OldDoFn<InputT, OutputT>.ProcessContext context) {
       fn.super();
       this.context = context;
     }
@@ -683,7 +683,7 @@ public abstract class DoFnReflector {
     }
   }
 
-  public static Class<?> getDoFnClass(DoFn<?, ?> fn) {
+  public static Class<?> getDoFnClass(OldDoFn<?, ?> fn) {
     if (fn instanceof SimpleDoFnAdapter) {
       return ((SimpleDoFnAdapter<?, ?>) fn).fn.getClass();
     } else {
@@ -691,7 +691,7 @@ public abstract class DoFnReflector {
     }
   }
 
-  private static class SimpleDoFnAdapter<InputT, OutputT> extends DoFn<InputT, OutputT> {
+  private static class SimpleDoFnAdapter<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
 
     private final DoFnWithContext<InputT, OutputT> fn;
     private transient DoFnInvoker<InputT, OutputT> invoker;
@@ -703,19 +703,19 @@ public abstract class DoFnReflector {
     }
 
     @Override
-    public void startBundle(DoFn<InputT, OutputT>.Context c) throws Exception {
+    public void startBundle(OldDoFn<InputT, OutputT>.Context c) throws Exception {
       ContextAdapter<InputT, OutputT> adapter = new ContextAdapter<>(fn, c);
       invoker.invokeStartBundle(adapter, adapter);
     }
 
     @Override
-    public void finishBundle(DoFn<InputT, OutputT>.Context c) throws Exception {
+    public void finishBundle(OldDoFn<InputT, OutputT>.Context c) throws Exception {
       ContextAdapter<InputT, OutputT> adapter = new ContextAdapter<>(fn, c);
       invoker.invokeFinishBundle(adapter, adapter);
     }
 
     @Override
-    public void processElement(DoFn<InputT, OutputT>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
       ProcessContextAdapter<InputT, OutputT> adapter = new ProcessContextAdapter<>(fn, c);
       invoker.invokeProcessElement(adapter, adapter);
     }
@@ -743,7 +743,7 @@ public abstract class DoFnReflector {
   }
 
   private static class WindowDoFnAdapter<InputT, OutputT>
-  extends SimpleDoFnAdapter<InputT, OutputT> implements DoFn.RequiresWindowAccess {
+  extends SimpleDoFnAdapter<InputT, OutputT> implements OldDoFn.RequiresWindowAccess {
 
     private WindowDoFnAdapter(DoFnReflector reflector, DoFnWithContext<InputT, OutputT> fn) {
       super(reflector, fn);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
index a136632..9336e4c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
@@ -49,12 +49,12 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * A harness for unit-testing a {@link DoFn}.
+ * A harness for unit-testing a {@link OldDoFn}.
  *
  * <p>For example:
  *
  * <pre> {@code
- * DoFn<InputT, OutputT> fn = ...;
+ * OldDoFn<InputT, OutputT> fn = ...;
  *
  * DoFnTester<InputT, OutputT> fnTester = DoFnTester.of(fn);
  *
@@ -71,22 +71,22 @@ import java.util.Map;
  * Assert.assertThat(fnTester.processBundle(i1, i2, ...), Matchers.hasItems(...));
  * } </pre>
  *
- * @param <InputT> the type of the {@code DoFn}'s (main) input elements
- * @param <OutputT> the type of the {@code DoFn}'s (main) output elements
+ * @param <InputT> the type of the {@code OldDoFn}'s (main) input elements
+ * @param <OutputT> the type of the {@code OldDoFn}'s (main) output elements
  */
 public class DoFnTester<InputT, OutputT> {
   /**
    * Returns a {@code DoFnTester} supporting unit-testing of the given
-   * {@link DoFn}.
+   * {@link OldDoFn}.
    */
   @SuppressWarnings("unchecked")
-  public static <InputT, OutputT> DoFnTester<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+  public static <InputT, OutputT> DoFnTester<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
     return new DoFnTester<InputT, OutputT>(fn);
   }
 
   /**
    * Returns a {@code DoFnTester} supporting unit-testing of the given
-   * {@link DoFn}.
+   * {@link OldDoFn}.
    */
   @SuppressWarnings("unchecked")
   public static <InputT, OutputT> DoFnTester<InputT, OutputT>
@@ -96,12 +96,12 @@ public class DoFnTester<InputT, OutputT> {
 
   /**
    * Registers the tuple of values of the side input {@link PCollectionView}s to
-   * pass to the {@link DoFn} under test.
+   * pass to the {@link OldDoFn} under test.
    *
    * <p>Resets the state of this {@link DoFnTester}.
    *
    * <p>If this isn't called, {@code DoFnTester} assumes the
-   * {@link DoFn} takes no side inputs.
+   * {@link OldDoFn} takes no side inputs.
    */
   public void setSideInputs(Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs) {
     this.sideInputs = sideInputs;
@@ -109,8 +109,8 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Registers the values of a side input {@link PCollectionView} to pass to the {@link DoFn} under
-   * test.
+   * Registers the values of a side input {@link PCollectionView} to pass to the {@link OldDoFn}
+   * under test.
    *
    * <p>The provided value is the final value of the side input in the specified window, not
    * the value of the input PCollection in that window.
@@ -128,7 +128,7 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Whether or not a {@link DoFnTester} should clone the {@link DoFn} under test.
+   * Whether or not a {@link DoFnTester} should clone the {@link OldDoFn} under test.
    */
   public enum CloningBehavior {
     CLONE,
@@ -136,14 +136,14 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Instruct this {@link DoFnTester} whether or not to clone the {@link DoFn} under test.
+   * Instruct this {@link DoFnTester} whether or not to clone the {@link OldDoFn} under test.
    */
   public void setCloningBehavior(CloningBehavior newValue) {
     this.cloningBehavior = newValue;
   }
 
   /**
-   *  Indicates whether this {@link DoFnTester} will clone the {@link DoFn} under test.
+   *  Indicates whether this {@link DoFnTester} will clone the {@link OldDoFn} under test.
    */
   public CloningBehavior getCloningBehavior() {
     return cloningBehavior;
@@ -165,7 +165,7 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * A convenience method for testing {@link DoFn DoFns} with bundles of elements.
+   * A convenience method for testing {@link OldDoFn DoFns} with bundles of elements.
    * Logic proceeds as follows:
    *
    * <ol>
@@ -181,9 +181,9 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Calls {@link DoFn#startBundle} on the {@code DoFn} under test.
+   * Calls {@link OldDoFn#startBundle} on the {@code OldDoFn} under test.
    *
-   * <p>If needed, first creates a fresh instance of the DoFn under test.
+   * <p>If needed, first creates a fresh instance of the OldDoFn under test.
    */
   public void startBundle() throws Exception {
     resetState();
@@ -195,14 +195,14 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Calls {@link DoFn#processElement} on the {@code DoFn} under test, in a
-   * context where {@link DoFn.ProcessContext#element} returns the
+   * Calls {@link OldDoFn#processElement} on the {@code OldDoFn} under test, in a
+   * context where {@link OldDoFn.ProcessContext#element} returns the
    * given element.
    *
    * <p>Will call {@link #startBundle} automatically, if it hasn't
    * already been called.
    *
-   * @throws IllegalStateException if the {@code DoFn} under test has already
+   * @throws IllegalStateException if the {@code OldDoFn} under test has already
    * been finished
    */
   public void processElement(InputT element) throws Exception {
@@ -216,12 +216,12 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   /**
-   * Calls {@link DoFn#finishBundle} of the {@code DoFn} under test.
+   * Calls {@link OldDoFn#finishBundle} of the {@code OldDoFn} under test.
    *
    * <p>Will call {@link #startBundle} automatically, if it hasn't
    * already been called.
    *
-   * @throws IllegalStateException if the {@code DoFn} under test has already
+   * @throws IllegalStateException if the {@code OldDoFn} under test has already
    * been finished
    */
   public void finishBundle() throws Exception {
@@ -403,18 +403,18 @@ public class DoFnTester<InputT, OutputT> {
     return MoreObjects.firstNonNull(elems, Collections.<WindowedValue<T>>emptyList());
   }
 
-  private TestContext<InputT, OutputT> createContext(DoFn<InputT, OutputT> fn) {
+  private TestContext<InputT, OutputT> createContext(OldDoFn<InputT, OutputT> fn) {
     return new TestContext<>(fn, options, mainOutputTag, outputs, accumulators);
   }
 
-  private static class TestContext<InT, OutT> extends DoFn<InT, OutT>.Context {
+  private static class TestContext<InT, OutT> extends OldDoFn<InT, OutT>.Context {
     private final PipelineOptions opts;
     private final TupleTag<OutT> mainOutputTag;
     private final Map<TupleTag<?>, List<WindowedValue<?>>> outputs;
     private final Map<String, Object> accumulators;
 
     public TestContext(
-        DoFn<InT, OutT> fn,
+        OldDoFn<InT, OutT> fn,
         PipelineOptions opts,
         TupleTag<OutT> mainOutputTag,
         Map<TupleTag<?>, List<WindowedValue<?>>> outputs,
@@ -498,7 +498,7 @@ public class DoFnTester<InputT, OutputT> {
   }
 
   private TestProcessContext<InputT, OutputT> createProcessContext(
-      DoFn<InputT, OutputT> fn,
+      OldDoFn<InputT, OutputT> fn,
       InputT elem) {
     return new TestProcessContext<>(fn,
         createContext(fn),
@@ -507,14 +507,14 @@ public class DoFnTester<InputT, OutputT> {
         sideInputs);
   }
 
-  private static class TestProcessContext<InT, OutT> extends DoFn<InT, OutT>.ProcessContext {
+  private static class TestProcessContext<InT, OutT> extends OldDoFn<InT, OutT>.ProcessContext {
     private final TestContext<InT, OutT> context;
     private final TupleTag<OutT> mainOutputTag;
     private final WindowedValue<InT> element;
     private final Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs;
 
     private TestProcessContext(
-        DoFn<InT, OutT> fn,
+        OldDoFn<InT, OutT> fn,
         TestContext<InT, OutT> context,
         WindowedValue<InT> element,
         TupleTag<OutT> mainOutputTag,
@@ -643,15 +643,15 @@ public class DoFnTester<InputT, OutputT> {
     protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT> createAggregatorInternal(
         String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
       throw new IllegalStateException("Aggregators should not be created within ProcessContext. "
-          + "Instead, create an aggregator at DoFn construction time with createAggregatorForDoFn,"
-          + " and ensure they are set up by the time startBundle is called "
-          + "with setupDelegateAggregators.");
+          + "Instead, create an aggregator at OldDoFn construction time with"
+          + " createAggregatorForDoFn, and ensure they are set up by the time startBundle is"
+          + " called with setupDelegateAggregators.");
     }
   }
 
   /////////////////////////////////////////////////////////////////////////////
 
-  /** The possible states of processing a DoFn. */
+  /** The possible states of processing a OldDoFn. */
   enum State {
     UNSTARTED,
     STARTED,
@@ -660,35 +660,35 @@ public class DoFnTester<InputT, OutputT> {
 
   private final PipelineOptions options = PipelineOptionsFactory.create();
 
-  /** The original DoFn under test. */
-  private final DoFn<InputT, OutputT> origFn;
+  /** The original OldDoFn under test. */
+  private final OldDoFn<InputT, OutputT> origFn;
 
   /**
-   * Whether to clone the original {@link DoFn} or just use it as-is.
+   * Whether to clone the original {@link OldDoFn} or just use it as-is.
    *
-   * <p></p>Worker-side {@link DoFn DoFns} may not be serializable, and are not required to be.
+   * <p></p>Worker-side {@link OldDoFn DoFns} may not be serializable, and are not required to be.
    */
   private CloningBehavior cloningBehavior = CloningBehavior.CLONE;
 
-  /** The side input values to provide to the DoFn under test. */
+  /** The side input values to provide to the OldDoFn under test. */
   private Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs =
       new HashMap<>();
 
   private Map<String, Object> accumulators;
 
-  /** The output tags used by the DoFn under test. */
+  /** The output tags used by the OldDoFn under test. */
   private TupleTag<OutputT> mainOutputTag = new TupleTag<>();
 
-  /** The original DoFn under test, if started. */
-  DoFn<InputT, OutputT> fn;
+  /** The original OldDoFn under test, if started. */
+  OldDoFn<InputT, OutputT> fn;
 
   /** The ListOutputManager to examine the outputs. */
   private Map<TupleTag<?>, List<WindowedValue<?>>> outputs;
 
-  /** The state of processing of the DoFn under test. */
+  /** The state of processing of the OldDoFn under test. */
   private State state;
 
-  private DoFnTester(DoFn<InputT, OutputT> origFn) {
+  private DoFnTester(OldDoFn<InputT, OutputT> origFn) {
     this.origFn = origFn;
     resetState();
   }
@@ -705,7 +705,7 @@ public class DoFnTester<InputT, OutputT> {
     if (cloningBehavior.equals(CloningBehavior.DO_NOT_CLONE)) {
       fn = origFn;
     } else {
-      fn = (DoFn<InputT, OutputT>)
+      fn = (OldDoFn<InputT, OutputT>)
           SerializableUtils.deserializeFromByteArray(
               SerializableUtils.serializeToByteArray(origFn),
               origFn.toString());


[38/51] [abbrv] incubator-beam git commit: Port PAssert to new DoFn

Posted by ke...@apache.org.
Port PAssert to new DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/ef5e31f8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/ef5e31f8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/ef5e31f8

Branch: refs/heads/python-sdk
Commit: ef5e31f8b79dcedf8feb4bba0e313bfcf330ab1e
Parents: 1959ddb
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 20:15:58 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../org/apache/beam/sdk/testing/PAssert.java    | 39 ++++++++++----------
 1 file changed, 19 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ef5e31f8/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
index 80340c2..e07ee3d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
@@ -33,11 +33,10 @@ import org.apache.beam.sdk.options.StreamingOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -762,8 +761,8 @@ public class PAssert {
           .apply("RewindowActuals", rewindowActuals.<T>windowActuals())
           .apply(
               ParDo.of(
-                  new OldDoFn<T, T>() {
-                    @Override
+                  new DoFn<T, T>() {
+                    @ProcessElement
                     public void processElement(ProcessContext context) throws CoderException {
                       context.output(CoderUtils.clone(coder, context.element()));
                     }
@@ -884,8 +883,8 @@ public class PAssert {
     }
   }
 
-  private static final class ConcatFn<T> extends OldDoFn<Iterable<Iterable<T>>, Iterable<T>> {
-    @Override
+  private static final class ConcatFn<T> extends DoFn<Iterable<Iterable<T>>, Iterable<T>> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(Iterables.concat(c.element()));
     }
@@ -995,13 +994,13 @@ public class PAssert {
   }
 
   /**
-   * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of a
+   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of a
    * {@link PCollectionView}, and adjusts counters and thrown exceptions for use in testing.
    *
    * <p>The input is ignored, but is {@link Integer} to be usable on runners that do not support
    * null values.
    */
-  private static class SideInputCheckerDoFn<ActualT> extends OldDoFn<Integer, Void> {
+  private static class SideInputCheckerDoFn<ActualT> extends DoFn<Integer, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1015,7 +1014,7 @@ public class PAssert {
       this.actual = actual;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       try {
         ActualT actualContents = c.sideInput(actual);
@@ -1030,13 +1029,13 @@ public class PAssert {
   }
 
   /**
-   * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
+   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
    * the single iterable element of the input {@link PCollection} and adjusts counters and
    * thrown exceptions for use in testing.
    *
    * <p>The singleton property is presumed, not enforced.
    */
-  private static class GroupedValuesCheckerDoFn<ActualT> extends OldDoFn<ActualT, Void> {
+  private static class GroupedValuesCheckerDoFn<ActualT> extends DoFn<ActualT, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1047,7 +1046,7 @@ public class PAssert {
       this.checkerFn = checkerFn;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       try {
         doChecks(c.element(), checkerFn, success, failure);
@@ -1061,14 +1060,14 @@ public class PAssert {
   }
 
   /**
-   * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
+   * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
    * the single item contained within the single iterable on input and
    * adjusts counters and thrown exceptions for use in testing.
    *
    * <p>The singleton property of the input {@link PCollection} is presumed, not enforced. However,
    * each input element must be a singleton iterable, or this will fail.
    */
-  private static class SingletonCheckerDoFn<ActualT> extends OldDoFn<Iterable<ActualT>, Void> {
+  private static class SingletonCheckerDoFn<ActualT> extends DoFn<Iterable<ActualT>, Void> {
     private final SerializableFunction<ActualT, Void> checkerFn;
     private final Aggregator<Integer, Integer> success =
         createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1079,7 +1078,7 @@ public class PAssert {
       this.checkerFn = checkerFn;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       try {
         ActualT actualContents = Iterables.getOnlyElement(c.element());
@@ -1310,7 +1309,7 @@ public class PAssert {
   }
 
   /**
-   * A OldDoFn that filters elements based on their presence in a static collection of windows.
+   * A DoFn that filters elements based on their presence in a static collection of windows.
    */
   private static final class FilterWindows<T> extends PTransform<PCollection<T>, PCollection<T>> {
     private final StaticWindows windows;
@@ -1324,10 +1323,10 @@ public class PAssert {
       return input.apply("FilterWindows", ParDo.of(new Fn()));
     }
 
-    private class Fn extends OldDoFn<T, T> implements RequiresWindowAccess {
-      @Override
-      public void processElement(ProcessContext c) throws Exception {
-        if (windows.getWindows().contains(c.window())) {
+    private class Fn extends DoFn<T, T> {
+      @ProcessElement
+      public void processElement(ProcessContext c, BoundedWindow window) throws Exception {
+        if (windows.getWindows().contains(window)) {
           c.output(c.element());
         }
       }


[33/51] [abbrv] incubator-beam git commit: Closes #781

Posted by ke...@apache.org.
Closes #781


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/74c5e5e1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/74c5e5e1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/74c5e5e1

Branch: refs/heads/python-sdk
Commit: 74c5e5e1b546759a082d4e4785d46f9d56fa1052
Parents: 734bfb9 71e027d
Author: Dan Halperin <dh...@google.com>
Authored: Thu Aug 4 11:54:31 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:54:31 2016 -0700

----------------------------------------------------------------------
 .../examples/complete/StreamingWordExtract.java | 18 ++++++------
 .../apache/beam/examples/complete/TfIdf.java    | 28 +++++++++---------
 .../examples/complete/TopWikipediaSessions.java | 31 +++++++++-----------
 .../examples/complete/TrafficMaxLaneFlow.java   | 16 +++++-----
 .../beam/examples/complete/TrafficRoutes.java   | 20 ++++++-------
 .../examples/cookbook/BigQueryTornadoes.java    | 10 +++----
 .../cookbook/CombinePerKeyExamples.java         | 10 +++----
 .../examples/cookbook/DatastoreWordCount.java   | 14 ++++-----
 .../beam/examples/cookbook/FilterExamples.java  | 20 ++++++-------
 .../beam/examples/cookbook/JoinExamples.java    | 18 ++++++------
 .../examples/cookbook/MaxPerKeyExamples.java    | 10 +++----
 .../beam/examples/cookbook/TriggerExample.java  | 25 ++++++++--------
 .../org/apache/beam/examples/WordCountTest.java |  3 +-
 .../examples/complete/AutoCompleteTest.java     |  6 ++--
 .../examples/cookbook/TriggerExampleTest.java   |  6 ++--
 .../beam/examples/complete/game/GameStats.java  | 22 +++++++-------
 .../beam/examples/complete/game/UserScore.java  |  6 ++--
 .../examples/complete/game/UserScoreTest.java   |  2 +-
 18 files changed, 130 insertions(+), 135 deletions(-)
----------------------------------------------------------------------



[24/51] [abbrv] incubator-beam git commit: Exclude "uncallable method of anonymous class" errors from findbugs

Posted by ke...@apache.org.
Exclude "uncallable method of anonymous class" errors from findbugs

These errors are spurious in the presence of reflective invocations,
such as that for the new DoFn.


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/8a2cf609
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/8a2cf609
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/8a2cf609

Branch: refs/heads/python-sdk
Commit: 8a2cf609ec0f3ac74a4c33a89cd852aee87cd0a1
Parents: 34d5012
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 21:23:22 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 11:06:10 2016 -0700

----------------------------------------------------------------------
 sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8a2cf609/sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml
----------------------------------------------------------------------
diff --git a/sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml b/sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml
index 28fd0df..f117fbf 100644
--- a/sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml
+++ b/sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml
@@ -23,6 +23,8 @@
   <Bug pattern="EI_EXPOSE_REP2" />
   <Bug pattern="SE_NO_SERIALVERSIONID"/>
 
+  <!-- The uncallable method error fails on @ProcessElement style methods -->
+  <Bug pattern="UMAC_UNCALLABLE_METHOD_OF_ANONYMOUS_CLASS"/>
 
   <!--
           Baseline issues below. No new issues should be added to this list. Instead, suppress


[46/51] [abbrv] incubator-beam git commit: Add DirectRunner Reuse Test

Posted by ke...@apache.org.
Add DirectRunner Reuse Test

Two calls to run using the Direct Runner should be independent and
succeed independently.


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/4546fd9c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/4546fd9c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/4546fd9c

Branch: refs/heads/python-sdk
Commit: 4546fd9c5e073eb33787faa302b8695dfd6e04aa
Parents: 7585cfc
Author: Thomas Groh <tg...@google.com>
Authored: Fri Aug 5 09:58:59 2016 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Aug 5 10:04:21 2016 -0700

----------------------------------------------------------------------
 .../beam/runners/direct/DirectRunnerTest.java   | 31 ++++++++++++++++++++
 1 file changed, 31 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4546fd9c/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
index 29dea32..1e73ec0 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
@@ -109,6 +109,37 @@ public class DirectRunnerTest implements Serializable {
     result.awaitCompletion();
   }
 
+  @Test
+  public void reusePipelineSucceeds() throws Throwable {
+    Pipeline p = getPipeline();
+
+    PCollection<KV<String, Long>> counts =
+        p.apply(Create.of("foo", "bar", "foo", "baz", "bar", "foo"))
+            .apply(MapElements.via(new SimpleFunction<String, String>() {
+              @Override
+              public String apply(String input) {
+                return input;
+              }
+            }))
+            .apply(Count.<String>perElement());
+    PCollection<String> countStrs =
+        counts.apply(MapElements.via(new SimpleFunction<KV<String, Long>, String>() {
+          @Override
+          public String apply(KV<String, Long> input) {
+            String str = String.format("%s: %s", input.getKey(), input.getValue());
+            return str;
+          }
+        }));
+
+    PAssert.that(countStrs).containsInAnyOrder("baz: 1", "bar: 2", "foo: 3");
+
+    DirectPipelineResult result = ((DirectPipelineResult) p.run());
+    result.awaitCompletion();
+
+    DirectPipelineResult otherResult = ((DirectPipelineResult) p.run());
+    otherResult.awaitCompletion();
+  }
+
   @Test(timeout = 5000L)
   public void byteArrayCountShouldSucceed() {
     Pipeline p = getPipeline();


[26/51] [abbrv] incubator-beam git commit: Closes #784

Posted by ke...@apache.org.
Closes #784


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/be2758cb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/be2758cb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/be2758cb

Branch: refs/heads/python-sdk
Commit: be2758cbf0014dccc34641fd791232ad061549d3
Parents: b8d7559 fb6d2c2
Author: Dan Halperin <dh...@google.com>
Authored: Thu Aug 4 11:47:31 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:47:31 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java    | 2 --
 .../core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java | 2 ++
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------



[08/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
Rename DoFn to OldDoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/a64baf48
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/a64baf48
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/a64baf48

Branch: refs/heads/python-sdk
Commit: a64baf4878f28e98da696dacc587c1151d0cdb9e
Parents: 388816a
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 13:00:10 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:52 2016 -0700

----------------------------------------------------------------------
 .../beam/examples/DebuggingWordCount.java       |   6 +-
 .../apache/beam/examples/MinimalWordCount.java  |   7 +-
 .../apache/beam/examples/WindowedWordCount.java |  10 +-
 .../org/apache/beam/examples/WordCount.java     |   8 +-
 .../examples/common/PubsubFileInjector.java     |   6 +-
 .../beam/examples/complete/AutoComplete.java    |  16 +-
 .../examples/complete/StreamingWordExtract.java |  12 +-
 .../apache/beam/examples/complete/TfIdf.java    |  16 +-
 .../examples/complete/TopWikipediaSessions.java |  12 +-
 .../examples/complete/TrafficMaxLaneFlow.java   |  10 +-
 .../beam/examples/complete/TrafficRoutes.java   |  12 +-
 .../examples/cookbook/BigQueryTornadoes.java    |   6 +-
 .../cookbook/CombinePerKeyExamples.java         |   6 +-
 .../examples/cookbook/DatastoreWordCount.java   |  11 +-
 .../beam/examples/cookbook/FilterExamples.java  |  12 +-
 .../beam/examples/cookbook/JoinExamples.java    |  10 +-
 .../examples/cookbook/MaxPerKeyExamples.java    |   6 +-
 .../beam/examples/cookbook/TriggerExample.java  |  12 +-
 .../org/apache/beam/examples/WordCountTest.java |   2 +-
 .../examples/complete/AutoCompleteTest.java     |   4 +-
 .../examples/cookbook/TriggerExampleTest.java   |   4 +-
 .../beam/examples/complete/game/GameStats.java  |  10 +-
 .../beam/examples/complete/game/UserScore.java  |   4 +-
 .../complete/game/utils/WriteToBigQuery.java    |  12 +-
 .../game/utils/WriteWindowedToBigQuery.java     |   8 +-
 .../examples/complete/game/UserScoreTest.java   |   2 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |  12 +-
 .../core/UnboundedReadFromBoundedSource.java    |   2 +-
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |  10 +-
 .../org/apache/beam/sdk/util/DoFnRunner.java    |  21 +-
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  54 +-
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  24 +-
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   6 +-
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |   4 +-
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |   6 +-
 .../sdk/util/LateDataDroppingDoFnRunner.java    |   4 +-
 .../apache/beam/sdk/util/PaneInfoTracker.java   |   1 -
 .../apache/beam/sdk/util/ReduceFnRunner.java    |   4 +-
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |  12 +-
 .../org/apache/beam/sdk/util/WatermarkHold.java |   1 -
 .../beam/sdk/util/ReduceFnRunnerTest.java       |   1 +
 .../apache/beam/sdk/util/ReduceFnTester.java    |   1 +
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   6 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   4 +-
 .../ImmutabilityCheckingBundleFactory.java      |   4 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   4 +-
 .../direct/ParDoMultiEvaluatorFactory.java      |  11 +-
 .../direct/ParDoSingleEvaluatorFactory.java     |  11 +-
 .../direct/TransformEvaluatorFactory.java       |   6 +-
 .../direct/WriteWithShardingFactory.java        |   4 +-
 .../ConsumerTrackingPipelineVisitorTest.java    |  22 +-
 .../beam/runners/direct/DirectRunnerTest.java   |  24 +-
 .../ImmutabilityCheckingBundleFactoryTest.java  |   6 +-
 .../ImmutabilityEnforcementFactoryTest.java     |   6 +-
 .../direct/KeyedPValueTrackingVisitorTest.java  |   6 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   6 +-
 .../direct/ParDoMultiEvaluatorFactoryTest.java  |  10 +-
 .../direct/ParDoSingleEvaluatorFactoryTest.java |  10 +-
 .../runners/direct/WatermarkManagerTest.java    |   7 +-
 .../beam/runners/flink/examples/TFIDF.java      |  16 +-
 .../beam/runners/flink/examples/WordCount.java  |   4 +-
 .../flink/examples/streaming/AutoComplete.java  |  16 +-
 .../flink/examples/streaming/JoinExamples.java  |   8 +-
 .../examples/streaming/KafkaIOExamples.java     |   4 +-
 .../KafkaWindowedWordCountExample.java          |   6 +-
 .../examples/streaming/WindowedWordCount.java   |   6 +-
 .../FlinkBatchTransformTranslators.java         |  12 +-
 .../FlinkStreamingTransformTranslators.java     |   9 +-
 .../functions/FlinkDoFnFunction.java            |  10 +-
 .../FlinkMergingNonShuffleReduceFunction.java   |   8 +-
 .../functions/FlinkMultiOutputDoFnFunction.java |  10 +-
 .../FlinkMultiOutputProcessContext.java         |   6 +-
 .../functions/FlinkNoElementAssignContext.java  |   8 +-
 .../functions/FlinkPartialReduceFunction.java   |   8 +-
 .../functions/FlinkProcessContext.java          |  16 +-
 .../functions/FlinkReduceFunction.java          |   8 +-
 .../streaming/FlinkAbstractParDoWrapper.java    |  18 +-
 .../FlinkGroupAlsoByWindowWrapper.java          |  10 +-
 .../streaming/FlinkParDoBoundMultiWrapper.java  |   4 +-
 .../streaming/FlinkParDoBoundWrapper.java       |   4 +-
 .../state/AbstractFlinkTimerInternals.java      |   4 +-
 .../beam/runners/flink/PipelineOptionsTest.java |   6 +-
 .../beam/runners/flink/ReadSourceITCase.java    |   4 +-
 .../flink/ReadSourceStreamingITCase.java        |   4 +-
 .../flink/streaming/GroupByNullKeyTest.java     |   8 +-
 .../streaming/TopWikipediaSessionsITCase.java   |   6 +-
 .../dataflow/DataflowPipelineTranslator.java    |   6 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  83 ++-
 .../dataflow/internal/AssignWindows.java        |   6 +-
 .../beam/runners/dataflow/util/DoFnInfo.java    |  16 +-
 .../DataflowPipelineTranslatorTest.java         |  10 +-
 .../beam/runners/spark/examples/WordCount.java  |   4 +-
 .../runners/spark/translation/DoFnFunction.java |   8 +-
 .../spark/translation/MultiDoFnFunction.java    |   8 +-
 .../spark/translation/SparkProcessContext.java  |  18 +-
 .../spark/translation/TransformTranslator.java  |   7 +-
 .../streaming/StreamingTransformTranslator.java |   4 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  12 +-
 .../spark/translation/CombinePerKeyTest.java    |   4 +-
 .../spark/translation/DoFnOutputTest.java       |   4 +-
 .../translation/MultiOutputWordCountTest.java   |   8 +-
 .../spark/translation/SerializationTest.java    |  10 +-
 .../spark/translation/SideEffectsTest.java      |   4 +-
 .../streaming/KafkaStreamingTest.java           |   4 +-
 .../org/apache/beam/sdk/coders/AvroCoder.java   |   1 -
 .../apache/beam/sdk/coders/DurationCoder.java   |   1 -
 .../apache/beam/sdk/coders/InstantCoder.java    |   1 -
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |   6 +-
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |   8 +-
 .../beam/sdk/io/PubsubUnboundedSource.java      |   4 +-
 .../java/org/apache/beam/sdk/io/Source.java     |   2 +-
 .../main/java/org/apache/beam/sdk/io/Write.java |  21 +-
 .../org/apache/beam/sdk/options/GcpOptions.java |   1 -
 .../beam/sdk/options/PipelineOptions.java       |   8 +-
 .../sdk/options/PipelineOptionsFactory.java     |   1 -
 .../sdk/options/PipelineOptionsReflector.java   |   1 +
 .../beam/sdk/runners/AggregatorValues.java      |   4 +-
 .../org/apache/beam/sdk/testing/PAssert.java    |  24 +-
 .../beam/sdk/testing/SerializableMatchers.java  |   1 -
 .../apache/beam/sdk/testing/TestPipeline.java   |   1 -
 .../beam/sdk/testing/TestPipelineOptions.java   |   1 +
 .../apache/beam/sdk/transforms/Aggregator.java  |  14 +-
 .../sdk/transforms/AggregatorRetriever.java     |   6 +-
 .../org/apache/beam/sdk/transforms/Combine.java |  14 +-
 .../apache/beam/sdk/transforms/CombineFns.java  |   4 +-
 .../org/apache/beam/sdk/transforms/Count.java   |   2 +-
 .../org/apache/beam/sdk/transforms/Create.java  |   2 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    | 565 -------------------
 .../beam/sdk/transforms/DoFnReflector.java      |  38 +-
 .../apache/beam/sdk/transforms/DoFnTester.java  |  86 +--
 .../beam/sdk/transforms/DoFnWithContext.java    |  16 +-
 .../org/apache/beam/sdk/transforms/Filter.java  |   2 +-
 .../beam/sdk/transforms/FlatMapElements.java    |   2 +-
 .../org/apache/beam/sdk/transforms/Flatten.java |   2 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |   2 +-
 .../transforms/IntraBundleParallelization.java  |  40 +-
 .../org/apache/beam/sdk/transforms/Keys.java    |   2 +-
 .../org/apache/beam/sdk/transforms/KvSwap.java  |   2 +-
 .../apache/beam/sdk/transforms/MapElements.java |   2 +-
 .../org/apache/beam/sdk/transforms/OldDoFn.java | 565 +++++++++++++++++++
 .../apache/beam/sdk/transforms/PTransform.java  |   2 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   | 203 +++----
 .../apache/beam/sdk/transforms/Partition.java   |   2 +-
 .../beam/sdk/transforms/RemoveDuplicates.java   |   2 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |   4 +-
 .../beam/sdk/transforms/SimpleFunction.java     |   6 +-
 .../org/apache/beam/sdk/transforms/Values.java  |   2 +-
 .../org/apache/beam/sdk/transforms/View.java    |   8 +-
 .../apache/beam/sdk/transforms/WithKeys.java    |   2 +-
 .../beam/sdk/transforms/WithTimestamps.java     |   4 +-
 .../sdk/transforms/display/DisplayData.java     |   1 -
 .../beam/sdk/transforms/join/CoGbkResult.java   |   1 -
 .../beam/sdk/transforms/join/CoGroupByKey.java  |  14 +-
 .../sdk/transforms/windowing/AfterEach.java     |   1 +
 .../windowing/AfterProcessingTime.java          |   1 +
 .../transforms/windowing/IntervalWindow.java    |   1 -
 .../beam/sdk/transforms/windowing/Never.java    |   1 +
 .../beam/sdk/transforms/windowing/PaneInfo.java |  10 +-
 .../beam/sdk/transforms/windowing/Window.java   |   4 +-
 .../beam/sdk/util/BaseExecutionContext.java     |   4 +-
 .../apache/beam/sdk/util/BucketingFunction.java |   1 +
 .../beam/sdk/util/CombineContextFactory.java    |   6 +-
 .../apache/beam/sdk/util/ExecutionContext.java  |   8 +-
 .../apache/beam/sdk/util/MovingFunction.java    |   1 +
 .../beam/sdk/util/PerKeyCombineFnRunner.java    |  44 +-
 .../beam/sdk/util/PerKeyCombineFnRunners.java   |  30 +-
 .../org/apache/beam/sdk/util/PubsubClient.java  |   1 +
 .../apache/beam/sdk/util/PubsubTestClient.java  |   1 +
 .../sdk/util/ReifyTimestampAndWindowsDoFn.java  |   6 +-
 .../org/apache/beam/sdk/util/Reshuffle.java     |   4 +-
 .../apache/beam/sdk/util/SerializableUtils.java |   2 +-
 .../org/apache/beam/sdk/util/StringUtils.java   |   2 +-
 .../beam/sdk/util/SystemDoFnInternal.java       |   6 +-
 .../apache/beam/sdk/util/TimerInternals.java    |   1 -
 .../apache/beam/sdk/util/ValueWithRecordId.java |   6 +-
 .../org/apache/beam/sdk/util/WindowedValue.java |   1 -
 .../beam/sdk/util/WindowingInternals.java       |   4 +-
 .../beam/sdk/util/common/ReflectHelpers.java    |   1 +
 .../beam/sdk/values/TimestampedValue.java       |   1 -
 .../java/org/apache/beam/sdk/PipelineTest.java  |   6 +-
 .../apache/beam/sdk/coders/AvroCoderTest.java   |   4 +-
 .../beam/sdk/coders/CoderRegistryTest.java      |   6 +-
 .../beam/sdk/coders/SerializableCoderTest.java  |   6 +-
 .../org/apache/beam/sdk/io/AvroSourceTest.java  |   1 +
 .../io/BoundedReadFromUnboundedSourceTest.java  |   1 +
 .../beam/sdk/io/CompressedSourceTest.java       |   1 +
 .../apache/beam/sdk/io/CountingInputTest.java   |   5 +-
 .../apache/beam/sdk/io/CountingSourceTest.java  |   4 +-
 .../beam/sdk/io/OffsetBasedSourceTest.java      |   1 +
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |   4 +-
 .../java/org/apache/beam/sdk/io/ReadTest.java   |   1 +
 .../java/org/apache/beam/sdk/io/TextIOTest.java |   1 +
 .../java/org/apache/beam/sdk/io/WriteTest.java  |   7 +-
 .../org/apache/beam/sdk/io/XmlSinkTest.java     |   1 +
 .../apache/beam/sdk/options/GcpOptionsTest.java |   1 +
 .../sdk/options/GoogleApiDebugOptionsTest.java  |   1 -
 .../sdk/options/PipelineOptionsFactoryTest.java |   1 -
 .../beam/sdk/options/PipelineOptionsTest.java   |   1 -
 .../sdk/options/ProxyInvocationHandlerTest.java |   2 +-
 .../AggregatorPipelineExtractorTest.java        |   6 +-
 .../apache/beam/sdk/testing/PAssertTest.java    |   1 -
 .../beam/sdk/testing/TestPipelineTest.java      |   1 -
 .../transforms/ApproximateQuantilesTest.java    |   1 +
 .../sdk/transforms/ApproximateUniqueTest.java   |   5 +-
 .../beam/sdk/transforms/CombineFnsTest.java     |   2 +-
 .../apache/beam/sdk/transforms/CombineTest.java |  12 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |   2 +-
 .../beam/sdk/transforms/DoFnContextTest.java    |  69 ---
 .../DoFnDelegatingAggregatorTest.java           |  16 +-
 .../beam/sdk/transforms/DoFnReflectorTest.java  |   2 +-
 .../apache/beam/sdk/transforms/DoFnTest.java    | 242 --------
 .../beam/sdk/transforms/DoFnTesterTest.java     |  10 +-
 .../sdk/transforms/DoFnWithContextTest.java     |   6 +-
 .../apache/beam/sdk/transforms/FlattenTest.java |   4 +-
 .../beam/sdk/transforms/GroupByKeyTest.java     |   6 +-
 .../IntraBundleParallelizationTest.java         |  23 +-
 .../beam/sdk/transforms/MapElementsTest.java    |   1 +
 .../org/apache/beam/sdk/transforms/MaxTest.java |   1 +
 .../org/apache/beam/sdk/transforms/MinTest.java |   2 +
 .../apache/beam/sdk/transforms/NoOpDoFn.java    |  20 +-
 .../beam/sdk/transforms/OldDoFnContextTest.java |  69 +++
 .../apache/beam/sdk/transforms/OldDoFnTest.java | 242 ++++++++
 .../apache/beam/sdk/transforms/ParDoTest.java   |  96 ++--
 .../beam/sdk/transforms/PartitionTest.java      |   1 +
 .../apache/beam/sdk/transforms/SampleTest.java  |   1 +
 .../org/apache/beam/sdk/transforms/TopTest.java |   1 +
 .../apache/beam/sdk/transforms/ViewTest.java    | 398 ++++++-------
 .../beam/sdk/transforms/WithTimestampsTest.java |   8 +-
 .../display/DisplayDataEvaluatorTest.java       |   6 +-
 .../display/DisplayDataMatchersTest.java        |   1 +
 .../sdk/transforms/display/DisplayDataTest.java |   6 +-
 .../sdk/transforms/join/CoGroupByKeyTest.java   |  18 +-
 .../sdk/transforms/windowing/NeverTest.java     |   1 +
 .../sdk/transforms/windowing/WindowTest.java    |   6 +-
 .../sdk/transforms/windowing/WindowingTest.java |  10 +-
 .../beam/sdk/util/BucketingFunctionTest.java    |   4 +-
 .../beam/sdk/util/MovingFunctionTest.java       |   4 +-
 .../beam/sdk/util/SerializableUtilsTest.java    |   1 -
 .../apache/beam/sdk/util/SerializerTest.java    |   1 -
 .../apache/beam/sdk/util/StringUtilsTest.java   |  16 +-
 .../org/apache/beam/sdk/util/TriggerTester.java |   1 +
 .../beam/sdk/util/common/CounterTest.java       |   1 +
 .../beam/sdk/values/PCollectionTupleTest.java   |   4 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   6 +-
 .../beam/sdk/extensions/joinlibrary/Join.java   |   8 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |  18 +-
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    |   4 +-
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      |  13 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |   6 +-
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |   4 +-
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |   6 +-
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |   4 +-
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |   8 +-
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   |   7 +-
 .../sdk/transforms/WithTimestampsJava8Test.java |   4 +-
 .../src/main/java/DebuggingWordCount.java       |   4 +-
 .../src/main/java/MinimalWordCount.java         |   6 +-
 .../src/main/java/WindowedWordCount.java        |   6 +-
 .../src/main/java/WordCount.java                |   6 +-
 .../main/java/common/PubsubFileInjector.java    |   4 +-
 .../src/main/java/StarterPipeline.java          |   6 +-
 .../src/main/java/it/pkg/StarterPipeline.java   |   6 +-
 .../transforms/DoFnReflectorBenchmark.java      |  14 +-
 263 files changed, 2196 insertions(+), 2151 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
index 8d85d44..3c43152 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.values.KV;
@@ -106,8 +106,8 @@ import java.util.regex.Pattern;
  * overridden with {@code --inputFile}.
  */
 public class DebuggingWordCount {
-  /** A DoFn that filters for a specific key based upon a regular expression. */
-  public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
+  /** A OldDoFn that filters for a specific key based upon a regular expression. */
+  public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
     /**
      * Concept #1: The logger below uses the fully qualified class name of FilterTextFn
      * as the logger. All log statements emitted by this logger will be referenced by this name

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
index 9f6d61a..ab0bb6d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.values.KV;
@@ -89,10 +89,11 @@ public class MinimalWordCount {
     // the input text (a set of Shakespeare's texts).
     p.apply(TextIO.Read.from("gs://dataflow-samples/shakespeare/*"))
      // Concept #2: Apply a ParDo transform to our PCollection of text lines. This ParDo invokes a
-     // DoFn (defined in-line) on each element that tokenizes the text line into individual words.
+     // OldDoFn (defined in-line) on each element that tokenizes the text line into individual
+     // words.
      // The ParDo returns a PCollection<String>, where each element is an individual word in
      // Shakespeare's collected texts.
-     .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+     .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
                        @Override
                        public void processElement(ProcessContext c) {
                          for (String word : c.element().split("[^a-zA-Z']+")) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
index 7a4b29f..17f7da3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,14 +103,14 @@ public class WindowedWordCount {
     static final int WINDOW_SIZE = 1;  // Default window duration in minutes
 
   /**
-   * Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for
+   * Concept #2: A OldDoFn that sets the data element timestamp. This is a silly method, just for
    * this example, for the bounded data case.
    *
    * <p>Imagine that many ghosts of Shakespeare are all typing madly at the same time to recreate
    * his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
    * 2-hour period.
    */
-  static class AddTimestampFn extends DoFn<String, String> {
+  static class AddTimestampFn extends OldDoFn<String, String> {
     private static final Duration RAND_RANGE = Duration.standardHours(2);
     private final Instant minTimestamp;
 
@@ -130,8 +130,8 @@ public class WindowedWordCount {
     }
   }
 
-  /** A DoFn that converts a Word and Count into a BigQuery table row. */
-  static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+  /** A OldDoFn that converts a Word and Count into a BigQuery table row. */
+  static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
index af16c44..274d1ad 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
@@ -26,8 +26,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -97,10 +97,10 @@ public class WordCount {
 
   /**
    * Concept #2: You can make your pipeline code less verbose by defining your DoFns statically out-
-   * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
-   * pipeline.
+   * of-line. This OldDoFn tokenizes lines of text into individual words; we pass it to a ParDo in
+   * the pipeline.
    */
-  static class ExtractWordsFn extends DoFn<String, String> {
+  static class ExtractWordsFn extends OldDoFn<String, String> {
     private final Aggregator<Long, Long> emptyLines =
         createAggregator("emptyLines", new Sum.SumLongFn());
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
index 15eda06..0a93521 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.IntraBundleParallelization;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.util.Transport;
 
 import com.google.api.services.pubsub.Pubsub;
@@ -71,8 +71,8 @@ public class PubsubFileInjector {
     }
   }
 
-  /** A DoFn that publishes non-empty lines to Google Cloud PubSub. */
-  public static class Bound extends DoFn<String, Void> {
+  /** A OldDoFn that publishes non-empty lines to Google Cloud PubSub. */
+  public static class Bound extends OldDoFn<String, Void> {
     private final String outputTopic;
     private final String timestampLabelKey;
     public transient Pubsub pubsub;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
index c6272e8..7b44af8 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
@@ -36,9 +36,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Filter;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Partition;
@@ -130,7 +130,7 @@ public class AutoComplete {
 
         // Map the KV outputs of Count into our own CompletionCandiate class.
         .apply("CreateCompletionCandidates", ParDo.of(
-            new DoFn<KV<String, Long>, CompletionCandidate>() {
+            new OldDoFn<KV<String, Long>, CompletionCandidate>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.output(new CompletionCandidate(c.element().getKey(), c.element().getValue()));
@@ -209,7 +209,7 @@ public class AutoComplete {
     }
 
     private static class FlattenTops
-        extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+        extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
       @Override
       public void processElement(ProcessContext c) {
         for (CompletionCandidate cc : c.element().getValue()) {
@@ -260,10 +260,10 @@ public class AutoComplete {
   }
 
   /**
-   * A DoFn that keys each candidate by all its prefixes.
+   * A OldDoFn that keys each candidate by all its prefixes.
    */
   private static class AllPrefixes
-      extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+      extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
     private final int minPrefix;
     private final int maxPrefix;
     public AllPrefixes(int minPrefix) {
@@ -341,7 +341,7 @@ public class AutoComplete {
   /**
    * Takes as input a set of strings, and emits each #hashtag found therein.
    */
-  static class ExtractHashtags extends DoFn<String, String> {
+  static class ExtractHashtags extends OldDoFn<String, String> {
     @Override
     public void processElement(ProcessContext c) {
       Matcher m = Pattern.compile("#\\S+").matcher(c.element());
@@ -351,7 +351,7 @@ public class AutoComplete {
     }
   }
 
-  static class FormatForBigquery extends DoFn<KV<String, List<CompletionCandidate>>, TableRow> {
+  static class FormatForBigquery extends OldDoFn<KV<String, List<CompletionCandidate>>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       List<TableRow> completions = new ArrayList<>();
@@ -385,7 +385,7 @@ public class AutoComplete {
    * Takes as input a the top candidates per prefix, and emits an entity
    * suitable for writing to Datastore.
    */
-  static class FormatForDatastore extends DoFn<KV<String, List<CompletionCandidate>>, Entity> {
+  static class FormatForDatastore extends OldDoFn<KV<String, List<CompletionCandidate>>, Entity> {
     private String kind;
 
     public FormatForDatastore(String kind) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
index db646a5..b0c9ffd 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import com.google.api.services.bigquery.model.TableFieldSchema;
@@ -55,8 +55,8 @@ import java.util.ArrayList;
  */
 public class StreamingWordExtract {
 
-  /** A DoFn that tokenizes lines of text into individual words. */
-  static class ExtractWords extends DoFn<String, String> {
+  /** A OldDoFn that tokenizes lines of text into individual words. */
+  static class ExtractWords extends OldDoFn<String, String> {
     @Override
     public void processElement(ProcessContext c) {
       String[] words = c.element().split("[^a-zA-Z']+");
@@ -68,8 +68,8 @@ public class StreamingWordExtract {
     }
   }
 
-  /** A DoFn that uppercases a word. */
-  static class Uppercase extends DoFn<String, String> {
+  /** A OldDoFn that uppercases a word. */
+  static class Uppercase extends OldDoFn<String, String> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element().toUpperCase());
@@ -79,7 +79,7 @@ public class StreamingWordExtract {
   /**
    * Converts strings into BigQuery rows.
    */
-  static class StringToRowConverter extends DoFn<String, TableRow> {
+  static class StringToRowConverter extends OldDoFn<String, TableRow> {
     /**
      * In this example, put the whole string into single BigQuery field.
      */

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
index 8305314..470a689 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -225,7 +225,7 @@ public class TfIdf {
       // of the words in the document associated with that that URI.
       PCollection<KV<URI, String>> uriToWords = uriToContent
           .apply("SplitWords", ParDo.of(
-              new DoFn<KV<URI, String>, KV<URI, String>>() {
+              new OldDoFn<KV<URI, String>, KV<URI, String>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
@@ -268,7 +268,7 @@ public class TfIdf {
       // by the URI key.
       PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
           .apply("ShiftKeys", ParDo.of(
-              new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+              new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey().getKey();
@@ -307,7 +307,7 @@ public class TfIdf {
       // divided by the total number of words in the document.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
           .apply("ComputeTermFrequencies", ParDo.of(
-              new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
@@ -328,11 +328,11 @@ public class TfIdf {
       // documents in which the word appears divided by the total
       // number of documents in the corpus. Note how the total number of
       // documents is passed as a side input; the same value is
-      // presented to each invocation of the DoFn.
+      // presented to each invocation of the OldDoFn.
       PCollection<KV<String, Double>> wordToDf = wordToDocCount
           .apply("ComputeDocFrequencies", ParDo
               .withSideInputs(totalDocuments)
-              .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+              .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();
@@ -361,7 +361,7 @@ public class TfIdf {
       // divided by the log of the document frequency.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf = wordToUriAndTfAndDf
           .apply("ComputeTfIdf", ParDo.of(
-              new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+              new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();
@@ -400,7 +400,7 @@ public class TfIdf {
     @Override
     public PDone apply(PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf) {
       return wordToUriAndTfIdf
-          .apply("Format", ParDo.of(new DoFn<KV<String, KV<URI, Double>>, String>() {
+          .apply("Format", ParDo.of(new OldDoFn<KV<String, KV<URI, Double>>, String>() {
             @Override
             public void processElement(ProcessContext c) {
               c.output(String.format("%s,\t%s,\t%f",

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
index f8af02a..0ed89d2 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
@@ -26,8 +26,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableComparator;
@@ -85,7 +85,7 @@ public class TopWikipediaSessions {
   /**
    * Extracts user and timestamp from a TableRow representing a Wikipedia edit.
    */
-  static class ExtractUserAndTimestamp extends DoFn<TableRow, String> {
+  static class ExtractUserAndTimestamp extends OldDoFn<TableRow, String> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
@@ -132,7 +132,7 @@ public class TopWikipediaSessions {
     }
   }
 
-  static class SessionsToStringsDoFn extends DoFn<KV<String, Long>, KV<String, Long>>
+  static class SessionsToStringsDoFn extends OldDoFn<KV<String, Long>, KV<String, Long>>
       implements RequiresWindowAccess {
 
     @Override
@@ -142,7 +142,7 @@ public class TopWikipediaSessions {
     }
   }
 
-  static class FormatOutputDoFn extends DoFn<List<KV<String, Long>>, String>
+  static class FormatOutputDoFn extends OldDoFn<List<KV<String, Long>>, String>
       implements RequiresWindowAccess {
     @Override
     public void processElement(ProcessContext c) {
@@ -168,7 +168,7 @@ public class TopWikipediaSessions {
           .apply(ParDo.of(new ExtractUserAndTimestamp()))
 
           .apply("SampleUsers", ParDo.of(
-              new DoFn<String, String>() {
+              new OldDoFn<String, String>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   if (Math.abs(c.element().hashCode()) <= Integer.MAX_VALUE * samplingThreshold) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
index 7b1496f..9122015 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -145,12 +145,12 @@ public class TrafficMaxLaneFlow {
   /**
    * Extract the timestamp field from the input string, and use it as the element timestamp.
    */
-  static class ExtractTimestamps extends DoFn<String, String> {
+  static class ExtractTimestamps extends OldDoFn<String, String> {
     private static final DateTimeFormatter dateTimeFormat =
         DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
 
     @Override
-    public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
       String[] items = c.element().split(",");
       if (items.length > 0) {
         try {
@@ -170,7 +170,7 @@ public class TrafficMaxLaneFlow {
    * information. The number of lanes for which data is present depends upon which freeway the data
    * point comes from.
    */
-  static class ExtractFlowInfoFn extends DoFn<String, KV<String, LaneInfo>> {
+  static class ExtractFlowInfoFn extends OldDoFn<String, KV<String, LaneInfo>> {
 
     @Override
     public void processElement(ProcessContext c) {
@@ -226,7 +226,7 @@ public class TrafficMaxLaneFlow {
    * Format the results of the Max Lane flow calculation to a TableRow, to save to BigQuery.
    * Add the timestamp from the window context.
    */
-  static class FormatMaxesFn extends DoFn<KV<String, LaneInfo>, TableRow> {
+  static class FormatMaxesFn extends OldDoFn<KV<String, LaneInfo>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
index ebf7b9a..30091b6 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
@@ -29,8 +29,8 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
@@ -149,12 +149,12 @@ public class TrafficRoutes {
   /**
    * Extract the timestamp field from the input string, and use it as the element timestamp.
    */
-  static class ExtractTimestamps extends DoFn<String, String> {
+  static class ExtractTimestamps extends OldDoFn<String, String> {
     private static final DateTimeFormatter dateTimeFormat =
         DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
 
     @Override
-    public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
+    public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
       String[] items = c.element().split(",");
       String timestamp = tryParseTimestamp(items);
       if (timestamp != null) {
@@ -171,7 +171,7 @@ public class TrafficRoutes {
    * Filter out readings for the stations along predefined 'routes', and output
    * (station, speed info) keyed on route.
    */
-  static class ExtractStationSpeedFn extends DoFn<String, KV<String, StationSpeed>> {
+  static class ExtractStationSpeedFn extends OldDoFn<String, KV<String, StationSpeed>> {
 
     @Override
     public void processElement(ProcessContext c) {
@@ -200,7 +200,7 @@ public class TrafficRoutes {
    * Note: these calculations are for example purposes only, and are unrealistic and oversimplified.
    */
   static class GatherStats
-      extends DoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
+      extends OldDoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
     @Override
     public void processElement(ProcessContext c) throws IOException {
       String route = c.element().getKey();
@@ -243,7 +243,7 @@ public class TrafficRoutes {
   /**
    * Format the results of the slowdown calculations to a TableRow, to save to BigQuery.
    */
-  static class FormatStatsFn extends DoFn<KV<String, RouteInfo>, TableRow> {
+  static class FormatStatsFn extends OldDoFn<KV<String, RouteInfo>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       RouteInfo routeInfo = c.element().getValue();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
index 665be01..6002b11 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -81,7 +81,7 @@ public class BigQueryTornadoes {
    * Examines each row in the input table. If a tornado was recorded
    * in that sample, the month in which it occurred is output.
    */
-  static class ExtractTornadoesFn extends DoFn<TableRow, Integer> {
+  static class ExtractTornadoesFn extends OldDoFn<TableRow, Integer> {
     @Override
     public void processElement(ProcessContext c){
       TableRow row = c.element();
@@ -95,7 +95,7 @@ public class BigQueryTornadoes {
    * Prepares the data for writing to BigQuery by building a TableRow object containing an
    * integer representation of month and the number of tornadoes that occurred in each month.
    */
-  static class FormatCountsFn extends DoFn<KV<Integer, Long>, TableRow> {
+  static class FormatCountsFn extends OldDoFn<KV<Integer, Long>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
index 252f3cc..d0bce5d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -90,7 +90,7 @@ public class CombinePerKeyExamples {
    * Examines each row in the input table. If the word is greater than or equal to MIN_WORD_LENGTH,
    * outputs word, play_name.
    */
-  static class ExtractLargeWordsFn extends DoFn<TableRow, KV<String, String>> {
+  static class ExtractLargeWordsFn extends OldDoFn<TableRow, KV<String, String>> {
     private final Aggregator<Long, Long> smallerWords =
         createAggregator("smallerWords", new Sum.SumLongFn());
 
@@ -114,7 +114,7 @@ public class CombinePerKeyExamples {
    * Prepares the data for writing to BigQuery by building a TableRow object
    * containing a word with a string listing the plays in which it appeared.
    */
-  static class FormatShakespeareOutputFn extends DoFn<KV<String, String>, TableRow> {
+  static class FormatShakespeareOutputFn extends OldDoFn<KV<String, String>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
index 847523b..1850e89 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
@@ -32,8 +32,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import com.google.datastore.v1beta3.Entity;
@@ -44,7 +44,6 @@ import com.google.datastore.v1beta3.Value;
 
 import java.util.Map;
 import java.util.UUID;
-
 import javax.annotation.Nullable;
 
 /**
@@ -80,10 +79,10 @@ import javax.annotation.Nullable;
 public class DatastoreWordCount {
 
   /**
-   * A DoFn that gets the content of an entity (one line in a
+   * A OldDoFn that gets the content of an entity (one line in a
    * Shakespeare play) and converts it to a string.
    */
-  static class GetContentFn extends DoFn<Entity, String> {
+  static class GetContentFn extends OldDoFn<Entity, String> {
     @Override
     public void processElement(ProcessContext c) {
       Map<String, Value> props = c.element().getProperties();
@@ -109,9 +108,9 @@ public class DatastoreWordCount {
   }
 
   /**
-   * A DoFn that creates entity for every line in Shakespeare.
+   * A OldDoFn that creates entity for every line in Shakespeare.
    */
-  static class CreateEntityFn extends DoFn<String, Entity> {
+  static class CreateEntityFn extends OldDoFn<String, Entity> {
     private final String namespace;
     private final String kind;
     private final Key ancestorKey;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
index ea1dcf6..06fba77 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Mean;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
@@ -98,7 +98,7 @@ public class FilterExamples {
    * Examines each row in the input table. Outputs only the subset of the cells this example
    * is interested in-- the mean_temp and year, month, and day-- as a bigquery table row.
    */
-  static class ProjectionFn extends DoFn<TableRow, TableRow> {
+  static class ProjectionFn extends OldDoFn<TableRow, TableRow> {
     @Override
     public void processElement(ProcessContext c){
       TableRow row = c.element();
@@ -119,9 +119,9 @@ public class FilterExamples {
    * Implements 'filter' functionality.
    *
    * <p>Examines each row in the input table. Outputs only rows from the month
-   * monthFilter, which is passed in as a parameter during construction of this DoFn.
+   * monthFilter, which is passed in as a parameter during construction of this OldDoFn.
    */
-  static class FilterSingleMonthDataFn extends DoFn<TableRow, TableRow> {
+  static class FilterSingleMonthDataFn extends OldDoFn<TableRow, TableRow> {
     Integer monthFilter;
 
     public FilterSingleMonthDataFn(Integer monthFilter) {
@@ -143,7 +143,7 @@ public class FilterExamples {
    * Examines each row (weather reading) in the input table. Output the temperature
    * reading for that row ('mean_temp').
    */
-  static class ExtractTempFn extends DoFn<TableRow, Double> {
+  static class ExtractTempFn extends OldDoFn<TableRow, Double> {
     @Override
     public void processElement(ProcessContext c){
       TableRow row = c.element();
@@ -191,7 +191,7 @@ public class FilterExamples {
       PCollection<TableRow> filteredRows = monthFilteredRows
           .apply("ParseAndFilter", ParDo
               .withSideInputs(globalMeanTemp)
-              .of(new DoFn<TableRow, TableRow>() {
+              .of(new OldDoFn<TableRow, TableRow>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   Double meanTemp = Double.parseDouble(c.element().get("mean_temp").toString());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
index 1b43cc2..5260c0d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -99,7 +99,7 @@ public class JoinExamples {
     // country code 'key' -> string of <event info>, <country name>
     PCollection<KV<String, String>> finalResultCollection =
       kvpCollection.apply("Process", ParDo.of(
-        new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
+        new OldDoFn<KV<String, CoGbkResult>, KV<String, String>>() {
           @Override
           public void processElement(ProcessContext c) {
             KV<String, CoGbkResult> e = c.element();
@@ -116,7 +116,7 @@ public class JoinExamples {
 
     // write to GCS
     PCollection<String> formattedResults = finalResultCollection
-        .apply("Format", ParDo.of(new DoFn<KV<String, String>, String>() {
+        .apply("Format", ParDo.of(new OldDoFn<KV<String, String>, String>() {
           @Override
           public void processElement(ProcessContext c) {
             String outputstring = "Country code: " + c.element().getKey()
@@ -131,7 +131,7 @@ public class JoinExamples {
    * Examines each row (event) in the input table. Output a KV with the key the country
    * code of the event, and the value a string encoding event information.
    */
-  static class ExtractEventDataFn extends DoFn<TableRow, KV<String, String>> {
+  static class ExtractEventDataFn extends OldDoFn<TableRow, KV<String, String>> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
@@ -149,7 +149,7 @@ public class JoinExamples {
    * Examines each row (country info) in the input table. Output a KV with the key the country
    * code, and the value the country name.
    */
-  static class ExtractCountryInfoFn extends DoFn<TableRow, KV<String, String>> {
+  static class ExtractCountryInfoFn extends OldDoFn<TableRow, KV<String, String>> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
index a37690b..1bcb491 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -82,7 +82,7 @@ public class MaxPerKeyExamples {
    * Examines each row (weather reading) in the input table. Output the month of the reading,
    * and the mean_temp.
    */
-  static class ExtractTempFn extends DoFn<TableRow, KV<Integer, Double>> {
+  static class ExtractTempFn extends OldDoFn<TableRow, KV<Integer, Double>> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
@@ -96,7 +96,7 @@ public class MaxPerKeyExamples {
    * Format the results to a TableRow, to save to BigQuery.
    *
    */
-  static class FormatMaxesFn extends DoFn<KV<Integer, Double>, TableRow> {
+  static class FormatMaxesFn extends OldDoFn<KV<Integer, Double>, TableRow> {
     @Override
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
index a0c5181..0be9921 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
@@ -28,9 +28,9 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.AfterEach;
@@ -342,7 +342,7 @@ public class TriggerExample {
           .apply(GroupByKey.<String, Integer>create());
 
       PCollection<KV<String, String>> results = flowPerFreeway.apply(ParDo.of(
-          new DoFn <KV<String, Iterable<Integer>>, KV<String, String>>() {
+          new OldDoFn<KV<String, Iterable<Integer>>, KV<String, String>>() {
 
             @Override
             public void processElement(ProcessContext c) throws Exception {
@@ -365,7 +365,7 @@ public class TriggerExample {
    * Format the results of the Total flow calculation to a TableRow, to save to BigQuery.
    * Adds the triggerType, pane information, processing time and the window timestamp.
    * */
-  static class FormatTotalFlow extends DoFn<KV<String, String>, TableRow>
+  static class FormatTotalFlow extends OldDoFn<KV<String, String>, TableRow>
   implements  RequiresWindowAccess {
     private String triggerType;
 
@@ -394,7 +394,7 @@ public class TriggerExample {
    * Extract the freeway and total flow in a reading.
    * Freeway is used as key since we are calculating the total flow for each freeway.
    */
-  static class ExtractFlowInfo extends DoFn<String, KV<String, Integer>> {
+  static class ExtractFlowInfo extends OldDoFn<String, KV<String, Integer>> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       String[] laneInfo = c.element().split(",");
@@ -471,7 +471,7 @@ public class TriggerExample {
    * Add current time to each record.
    * Also insert a delay at random to demo the triggers.
    */
-  public static class InsertDelays extends DoFn<String, String> {
+  public static class InsertDelays extends OldDoFn<String, String> {
     private static final double THRESHOLD = 0.001;
     // MIN_DELAY and MAX_DELAY in minutes.
     private static final int MIN_DELAY = 1;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
index ff117dc..26bf8fb 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
@@ -46,7 +46,7 @@ import java.util.List;
 @RunWith(JUnit4.class)
 public class WordCountTest {
 
-  /** Example test that tests a specific DoFn. */
+  /** Example test that tests a specific OldDoFn. */
   @Test
   public void testExtractWordsFn() throws Exception {
     DoFnTester<String, String> extractWordsFn =

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
index b2ed9a2..6f68ce8 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
@@ -23,8 +23,8 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Filter;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -171,7 +171,7 @@ public class AutoCompleteTest implements Serializable {
       extends PTransform<PCollection<TimestampedValue<T>>, PCollection<T>> {
     @Override
     public PCollection<T> apply(PCollection<TimestampedValue<T>> input) {
-      return input.apply(ParDo.of(new DoFn<TimestampedValue<T>, T>() {
+      return input.apply(ParDo.of(new OldDoFn<TimestampedValue<T>, T>() {
         @Override
         public void processElement(ProcessContext c) {
           c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
index 6f58389..e72a9e8 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -141,7 +141,7 @@ public class TriggerExampleTest {
     return Joiner.on(",").join(entries);
   }
 
-  static class FormatResults extends DoFn<TableRow, String> {
+  static class FormatResults extends OldDoFn<TableRow, String> {
     @Override
     public void processElement(ProcessContext c) throws Exception {
       TableRow element = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
index 33b8727..b1407f6 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
@@ -27,10 +27,10 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.MapElements;
 import org.apache.beam.sdk.transforms.Mean;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -126,7 +126,7 @@ public class GameStats extends LeaderBoard {
           .apply("ProcessAndFilter", ParDo
               // use the derived mean total score as a side input
               .withSideInputs(globalMeanScore)
-              .of(new DoFn<KV<String, Integer>, KV<String, Integer>>() {
+              .of(new OldDoFn<KV<String, Integer>, KV<String, Integer>>() {
                 private final Aggregator<Long, Long> numSpammerUsers =
                   createAggregator("SpammerUsers", new Sum.SumLongFn());
                 @Override
@@ -149,7 +149,7 @@ public class GameStats extends LeaderBoard {
   /**
    * Calculate and output an element's session duration.
    */
-  private static class UserSessionInfoFn extends DoFn<KV<String, Integer>, Integer>
+  private static class UserSessionInfoFn extends OldDoFn<KV<String, Integer>, Integer>
       implements RequiresWindowAccess {
 
     @Override
@@ -281,7 +281,7 @@ public class GameStats extends LeaderBoard {
       // Filter out the detected spammer users, using the side input derived above.
       .apply("FilterOutSpammers", ParDo
               .withSideInputs(spammersView)
-              .of(new DoFn<GameActionInfo, GameActionInfo>() {
+              .of(new OldDoFn<GameActionInfo, GameActionInfo>() {
                 @Override
                 public void processElement(ProcessContext c) {
                   // If the user is not in the spammers Map, output the data element.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
index 28614cb..00dc8a4 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
@@ -28,8 +28,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -123,7 +123,7 @@ public class UserScore {
    * user2_AsparagusPig,AsparagusPig,10,1445230923951,2015-11-02 09:09:28.224
    * The human-readable time string is not used here.
    */
-  static class ParseEventFn extends DoFn<String, GameActionInfo> {
+  static class ParseEventFn extends OldDoFn<String, GameActionInfo> {
 
     // Log and count parse errors.
     private static final Logger LOG = LoggerFactory.getLogger(ParseEventFn.class);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
index 36ed195..6af6e15 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -66,10 +66,10 @@ public class WriteToBigQuery<T>
     // The BigQuery 'type' of the field
     private String fieldType;
     // A lambda function to generate the field value
-    private SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fieldFn;
+    private SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fieldFn;
 
     public FieldInfo(String fieldType,
-        SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fieldFn) {
+        SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fieldFn) {
       this.fieldType = fieldType;
       this.fieldFn = fieldFn;
     }
@@ -78,12 +78,12 @@ public class WriteToBigQuery<T>
       return this.fieldType;
     }
 
-    SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> getFieldFn() {
+    SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> getFieldFn() {
       return this.fieldFn;
     }
   }
   /** Convert each key/score pair into a BigQuery TableRow as specified by fieldFn. */
-  protected class BuildRowFn extends DoFn<T, TableRow> {
+  protected class BuildRowFn extends OldDoFn<T, TableRow> {
 
     @Override
     public void processElement(ProcessContext c) {
@@ -92,7 +92,7 @@ public class WriteToBigQuery<T>
       for (Map.Entry<String, FieldInfo<T>> entry : fieldInfo.entrySet()) {
           String key = entry.getKey();
           FieldInfo<T> fcnInfo = entry.getValue();
-          SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fcn =
+          SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fcn =
             fcnInfo.getFieldFn();
           row.set(key, fcn.apply(c));
         }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
index b4c9b4a..c59fd61 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
@@ -20,8 +20,8 @@ package org.apache.beam.examples.complete.game.utils;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.values.PCollection;
@@ -45,7 +45,7 @@ public class WriteWindowedToBigQuery<T>
   }
 
   /** Convert each key/score pair into a BigQuery TableRow. */
-  protected class BuildRowFn extends DoFn<T, TableRow>
+  protected class BuildRowFn extends OldDoFn<T, TableRow>
       implements RequiresWindowAccess {
 
     @Override
@@ -55,7 +55,7 @@ public class WriteWindowedToBigQuery<T>
       for (Map.Entry<String, FieldInfo<T>> entry : fieldInfo.entrySet()) {
           String key = entry.getKey();
           FieldInfo<T> fcnInfo = entry.getValue();
-          SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fcn =
+          SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fcn =
             fcnInfo.getFieldFn();
           row.set(key, fcn.apply(c));
         }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
index cc3e7fa..01efad8 100644
--- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
+++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
@@ -83,7 +83,7 @@ public class UserScoreTest implements Serializable {
       KV.of("AndroidGreenKookaburra", 23),
       KV.of("BisqueBilby", 14));
 
-  /** Test the ParseEventFn DoFn. */
+  /** Test the ParseEventFn OldDoFn. */
   @Test
   public void testParseEventFn() throws Exception {
     DoFnTester<String, GameActionInfo> parseEventFn =

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
index 0d320bc..7cdab00 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
@@ -18,7 +18,7 @@
 package org.apache.beam.runners.core;
 
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.DoFnRunner.ReduceFnExecutor;
@@ -41,10 +41,10 @@ import org.apache.beam.sdk.values.KV;
 @SystemDoFnInternal
 public class GroupAlsoByWindowViaWindowSetDoFn<
         K, InputT, OutputT, W extends BoundedWindow, RinT extends KeyedWorkItem<K, InputT>>
-    extends DoFn<RinT, KV<K, OutputT>> implements ReduceFnExecutor<K, InputT, OutputT, W> {
+    extends OldDoFn<RinT, KV<K, OutputT>> implements ReduceFnExecutor<K, InputT, OutputT, W> {
 
   public static <K, InputT, OutputT, W extends BoundedWindow>
-      DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> create(
+      OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> create(
           WindowingStrategy<?, W> strategy,
           StateInternalsFactory<K> stateInternalsFactory,
           SystemReduceFn<K, InputT, ?, OutputT, W> reduceFn) {
@@ -99,11 +99,11 @@ public class GroupAlsoByWindowViaWindowSetDoFn<
   }
 
   @Override
-  public DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn() {
+  public OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn() {
     // Safe contravariant cast
     @SuppressWarnings("unchecked")
-    DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asFn =
-        (DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>>) this;
+    OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asFn =
+        (OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>>) this;
     return asFn;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
index 5821e73..3ce0c06 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
@@ -18,6 +18,7 @@
 package org.apache.beam.runners.core;
 
 import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName;
+
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
@@ -47,7 +48,6 @@ import com.google.common.collect.Lists;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
index d40b007..739db45 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
@@ -19,8 +19,8 @@ package org.apache.beam.sdk.util;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
@@ -32,14 +32,14 @@ import org.joda.time.Instant;
 import java.util.Collection;
 
 /**
- * {@link DoFn} that tags elements of a {@link PCollection} with windows, according to the provided
- * {@link WindowFn}.
+ * {@link OldDoFn} that tags elements of a {@link PCollection} with windows, according to the
+ * provided {@link WindowFn}.
  *
  * @param <T> Type of elements being windowed
  * @param <W> Window type
  */
 @SystemDoFnInternal
-public class AssignWindowsDoFn<T, W extends BoundedWindow> extends DoFn<T, T>
+public class AssignWindowsDoFn<T, W extends BoundedWindow> extends OldDoFn<T, T>
     implements RequiresWindowAccess {
   private WindowFn<? super T, W> fn;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
index 4ec8920..49206d1 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
@@ -18,41 +18,42 @@
 package org.apache.beam.sdk.util;
 
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.ProcessContext;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.ProcessContext;
 import org.apache.beam.sdk.values.KV;
 
 /**
- * An wrapper interface that represents the execution of a {@link DoFn}.
+ * An wrapper interface that represents the execution of a {@link OldDoFn}.
  */
 public interface DoFnRunner<InputT, OutputT> {
   /**
-   * Prepares and calls {@link DoFn#startBundle}.
+   * Prepares and calls {@link OldDoFn#startBundle}.
    */
   public void startBundle();
 
   /**
-   * Calls {@link DoFn#processElement} with a {@link ProcessContext} containing the current element.
+   * Calls {@link OldDoFn#processElement} with a {@link ProcessContext} containing the current
+   * element.
    */
   public void processElement(WindowedValue<InputT> elem);
 
   /**
-   * Calls {@link DoFn#finishBundle} and performs additional tasks, such as
+   * Calls {@link OldDoFn#finishBundle} and performs additional tasks, such as
    * flushing in-memory states.
    */
   public void finishBundle();
 
   /**
-   * An internal interface for signaling that a {@link DoFn} requires late data dropping.
+   * An internal interface for signaling that a {@link OldDoFn} requires late data dropping.
    */
   public interface ReduceFnExecutor<K, InputT, OutputT, W> {
     /**
-     * Gets this object as a {@link DoFn}.
+     * Gets this object as a {@link OldDoFn}.
      *
-     * Most implementors of this interface are expected to be {@link DoFn} instances, and will
+     * Most implementors of this interface are expected to be {@link OldDoFn} instances, and will
      * return themselves.
      */
-    DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn();
+    OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn();
 
     /**
      * Returns an aggregator that tracks elements that are dropped due to being late.


[15/51] [abbrv] incubator-beam git commit: Port microbenchmarks to new vocabulary

Posted by ke...@apache.org.
Port microbenchmarks to new vocabulary


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/e07c3397
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/e07c3397
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/e07c3397

Branch: refs/heads/python-sdk
Commit: e07c3397d268f50cc879362227a6887cc52f4a3b
Parents: 3236eec
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:51 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../transforms/DoFnReflectorBenchmark.java      | 27 ++++++++++----------
 1 file changed, 14 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e07c3397/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
----------------------------------------------------------------------
diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
index fd75e95..233b8be 100644
--- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
+++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
@@ -53,9 +53,10 @@ public class DoFnReflectorBenchmark {
   private OldDoFn<String, String> oldDoFn = new UpperCaseOldDoFn();
   private DoFn<String, String> doFn = new UpperCaseDoFn();
 
-  private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(oldDoFn, ELEMENT);
-  private StubDoFnWithContextProcessContext stubDoFnWithContextContext =
-      new StubDoFnWithContextProcessContext(doFn, ELEMENT);
+  private StubOldDoFnProcessContext stubOldDoFnContext = new StubOldDoFnProcessContext(oldDoFn,
+      ELEMENT);
+  private StubDoFnProcessContext stubDoFnContext =
+      new StubDoFnProcessContext(doFn, ELEMENT);
   private ExtraContextFactory<String, String> extraContextFactory =
       new ExtraContextFactory<String, String>() {
 
@@ -83,21 +84,21 @@ public class DoFnReflectorBenchmark {
   }
 
   @Benchmark
-  public String invokeDoFn() throws Exception {
-    oldDoFn.processElement(stubDoFnContext);
+  public String invokeOldDoFn() throws Exception {
+    oldDoFn.processElement(stubOldDoFnContext);
     return stubDoFnContext.output;
   }
 
   @Benchmark
   public String invokeDoFnWithContextViaAdaptor() throws Exception {
-    adaptedDoFnWithContext.processElement(stubDoFnContext);
-    return stubDoFnContext.output;
+    adaptedDoFnWithContext.processElement(stubOldDoFnContext);
+    return stubOldDoFnContext.output;
   }
 
   @Benchmark
   public String invokeDoFnWithContext() throws Exception {
-    invoker.invokeProcessElement(stubDoFnWithContextContext, extraContextFactory);
-    return stubDoFnWithContextContext.output;
+    invoker.invokeProcessElement(stubDoFnContext, extraContextFactory);
+    return stubDoFnContext.output;
   }
 
   private static class UpperCaseOldDoFn extends OldDoFn<String, String> {
@@ -116,12 +117,12 @@ public class DoFnReflectorBenchmark {
     }
   }
 
-  private static class StubDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
+  private static class StubOldDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
 
     private final String element;
     private String output;
 
-    public StubDoFnProcessContext(OldDoFn<String, String> fn, String element) {
+    public StubOldDoFnProcessContext(OldDoFn<String, String> fn, String element) {
       fn.super();
       this.element = element;
     }
@@ -186,12 +187,12 @@ public class DoFnReflectorBenchmark {
     }
   }
 
-  private static class StubDoFnWithContextProcessContext
+  private static class StubDoFnProcessContext
       extends DoFn<String, String>.ProcessContext {
     private final String element;
     private  String output;
 
-    public StubDoFnWithContextProcessContext(DoFn<String, String> fn, String element) {
+    public StubDoFnProcessContext(DoFn<String, String> fn, String element) {
       fn.super();
       this.element = element;
     }


[51/51] [abbrv] incubator-beam git commit: This closes #787

Posted by ke...@apache.org.
This closes #787


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/d72ffb08
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/d72ffb08
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/d72ffb08

Branch: refs/heads/python-sdk
Commit: d72ffb0804bfc6f4b4a5e21ed01c6c42424dae7e
Parents: 65152ca c398811
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Aug 5 19:52:24 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Fri Aug 5 19:52:24 2016 -0700

----------------------------------------------------------------------
 .travis.yml                                     |    6 +-
 KEYS                                            |  141 -
 NOTICE                                          |    1 -
 README.md                                       |   32 +-
 examples/java/pom.xml                           |   73 +-
 .../beam/examples/DebuggingWordCount.java       |    4 +-
 .../apache/beam/examples/MinimalWordCount.java  |   30 +-
 .../apache/beam/examples/WindowedWordCount.java |   91 +-
 .../org/apache/beam/examples/WordCount.java     |    6 +-
 .../examples/common/DataflowExampleOptions.java |   37 -
 .../examples/common/DataflowExampleUtils.java   |  491 --
 .../common/ExampleBigQueryTableOptions.java     |   10 +-
 .../beam/examples/common/ExampleOptions.java    |   83 +
 ...xamplePubsubTopicAndSubscriptionOptions.java |   10 +-
 .../common/ExamplePubsubTopicOptions.java       |   12 +-
 .../beam/examples/common/ExampleUtils.java      |  390 ++
 .../examples/common/PubsubFileInjector.java     |   10 +-
 .../beam/examples/complete/AutoComplete.java    |   91 +-
 .../examples/complete/StreamingWordExtract.java |   56 +-
 .../apache/beam/examples/complete/TfIdf.java    |   28 +-
 .../examples/complete/TopWikipediaSessions.java |   38 +-
 .../examples/complete/TrafficMaxLaneFlow.java   |   90 +-
 .../beam/examples/complete/TrafficRoutes.java   |   90 +-
 .../examples/cookbook/BigQueryTornadoes.java    |    6 +-
 .../cookbook/CombinePerKeyExamples.java         |    6 +-
 .../examples/cookbook/DatastoreWordCount.java   |   67 +-
 .../beam/examples/cookbook/DeDupExample.java    |    5 +-
 .../beam/examples/cookbook/FilterExamples.java  |   17 +-
 .../beam/examples/cookbook/JoinExamples.java    |   14 +-
 .../examples/cookbook/MaxPerKeyExamples.java    |    6 +-
 .../beam/examples/cookbook/TriggerExample.java  |   97 +-
 .../org/apache/beam/examples/WordCountIT.java   |  118 +-
 .../org/apache/beam/examples/WordCountTest.java |    3 +-
 .../examples/complete/AutoCompleteTest.java     |    2 +-
 .../examples/cookbook/TriggerExampleTest.java   |    2 +-
 examples/java8/pom.xml                          |   81 +-
 .../beam/examples/MinimalWordCountJava8.java    |   30 +-
 .../beam/examples/complete/game/GameStats.java  |   57 +-
 .../examples/complete/game/HourlyTeamScore.java |    9 +-
 .../examples/complete/game/LeaderBoard.java     |   23 +-
 .../beam/examples/complete/game/UserScore.java  |    4 +-
 .../complete/game/injector/InjectorUtils.java   |    6 +-
 .../injector/RetryHttpInitializerWrapper.java   |    5 +-
 .../complete/game/utils/WriteToBigQuery.java    |   20 +-
 .../game/utils/WriteWindowedToBigQuery.java     |   16 +-
 .../complete/game/HourlyTeamScoreTest.java      |    2 +-
 .../examples/complete/game/UserScoreTest.java   |    2 +-
 examples/pom.xml                                |    4 +-
 pom.xml                                         |  265 +-
 runners/core-java/pom.xml                       |   55 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |   28 +-
 .../core/UnboundedReadFromBoundedSource.java    |  542 ++
 .../apache/beam/runners/core/package-info.java  |   22 +
 .../org/apache/beam/sdk/util/AssignWindows.java |   46 +
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |   80 +
 .../beam/sdk/util/BatchTimerInternals.java      |  141 +
 .../org/apache/beam/sdk/util/DoFnRunner.java    |   63 +
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  551 ++
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  222 +
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   63 +
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |  100 +
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |  269 +
 .../sdk/util/LateDataDroppingDoFnRunner.java    |  147 +
 .../org/apache/beam/sdk/util/NonEmptyPanes.java |  150 +
 .../apache/beam/sdk/util/PaneInfoTracker.java   |  158 +
 .../sdk/util/PushbackSideInputDoFnRunner.java   |  115 +
 .../java/org/apache/beam/sdk/util/ReduceFn.java |  130 +
 .../beam/sdk/util/ReduceFnContextFactory.java   |  497 ++
 .../apache/beam/sdk/util/ReduceFnRunner.java    |  988 ++++
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |   55 +
 .../apache/beam/sdk/util/SystemReduceFn.java    |  139 +
 .../org/apache/beam/sdk/util/TriggerRunner.java |  245 +
 .../org/apache/beam/sdk/util/WatermarkHold.java |  540 ++
 .../util/common/ElementByteSizeObservable.java  |   42 +
 .../beam/sdk/util/common/PeekingReiterator.java |   99 +
 .../beam/sdk/util/common/package-info.java      |   20 +
 .../org/apache/beam/sdk/util/package-info.java  |   20 +
 .../UnboundedReadFromBoundedSourceTest.java     |  374 ++
 .../beam/sdk/util/BatchTimerInternalsTest.java  |  118 +
 .../sdk/util/GroupAlsoByWindowsProperties.java  |  661 +++
 ...oupAlsoByWindowsViaOutputBufferDoFnTest.java |  110 +
 .../util/LateDataDroppingDoFnRunnerTest.java    |  117 +
 .../util/PushbackSideInputDoFnRunnerTest.java   |  234 +
 .../beam/sdk/util/ReduceFnRunnerTest.java       | 1447 ++++++
 .../apache/beam/sdk/util/ReduceFnTester.java    |  789 +++
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   86 +
 runners/direct-java/pom.xml                     |   22 +-
 .../runners/direct/AggregatorContainer.java     |  183 +
 .../direct/AvroIOShardedWriteFactory.java       |   76 -
 .../direct/BoundedReadEvaluatorFactory.java     |   26 +-
 .../beam/runners/direct/CloningThreadLocal.java |   43 +
 .../beam/runners/direct/CommittedResult.java    |   23 +-
 .../beam/runners/direct/CompletionCallback.java |    8 +
 .../beam/runners/direct/DirectGroupByKey.java   |    2 +-
 .../beam/runners/direct/DirectRegistrar.java    |    4 +-
 .../beam/runners/direct/DirectRunner.java       |   38 +-
 .../beam/runners/direct/EvaluationContext.java  |   57 +-
 .../beam/runners/direct/EvaluatorKey.java       |   55 -
 .../direct/ExecutorServiceParallelExecutor.java |  186 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   33 +-
 .../ImmutabilityCheckingBundleFactory.java      |    4 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   24 +-
 .../direct/ParDoMultiEvaluatorFactory.java      |   53 +-
 .../direct/ParDoSingleEvaluatorFactory.java     |   59 +-
 ...rializableCloningThreadLocalCacheLoader.java |   54 -
 .../runners/direct/ShardControlledWrite.java    |   81 -
 .../runners/direct/StepTransformResult.java     |   95 +-
 .../direct/TextIOShardedWriteFactory.java       |   78 -
 .../direct/TransformEvaluatorFactory.java       |   10 +-
 .../beam/runners/direct/TransformExecutor.java  |    3 +-
 .../beam/runners/direct/TransformResult.java    |   16 +-
 .../direct/UnboundedReadDeduplicator.java       |  102 +
 .../direct/UnboundedReadEvaluatorFactory.java   |   50 +-
 .../runners/direct/ViewEvaluatorFactory.java    |    9 +-
 .../beam/runners/direct/WatermarkManager.java   |    5 +-
 .../runners/direct/WindowEvaluatorFactory.java  |   18 +-
 .../direct/WriteWithShardingFactory.java        |  142 +
 .../beam/runners/direct/package-info.java       |   25 +
 .../runners/direct/AggregatorContainerTest.java |  134 +
 .../direct/AvroIOShardedWriteFactoryTest.java   |  120 -
 .../runners/direct/CloningThreadLocalTest.java  |   92 +
 .../runners/direct/CommittedResultTest.java     |   30 +-
 .../ConsumerTrackingPipelineVisitorTest.java    |   22 +-
 .../runners/direct/DirectRegistrarTest.java     |   17 +-
 .../beam/runners/direct/DirectRunnerTest.java   |   59 +-
 .../runners/direct/EvaluationContextTest.java   |   32 +-
 .../direct/GroupByKeyEvaluatorFactoryTest.java  |    2 +-
 .../GroupByKeyOnlyEvaluatorFactoryTest.java     |    2 +-
 .../ImmutabilityCheckingBundleFactoryTest.java  |    6 +-
 .../ImmutabilityEnforcementFactoryTest.java     |    6 +-
 .../direct/KeyedPValueTrackingVisitorTest.java  |    6 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   18 +-
 .../direct/ParDoMultiEvaluatorFactoryTest.java  |   41 +-
 .../direct/ParDoSingleEvaluatorFactoryTest.java |   41 +-
 ...izableCloningThreadLocalCacheLoaderTest.java |   99 -
 .../runners/direct/SideInputContainerTest.java  |    1 +
 .../runners/direct/StepTransformResultTest.java |   91 +
 .../direct/TextIOShardedWriteFactoryTest.java   |  120 -
 .../runners/direct/TransformExecutorTest.java   |   33 +-
 .../direct/UnboundedReadDeduplicatorTest.java   |  134 +
 .../UnboundedReadEvaluatorFactoryTest.java      |   50 +-
 .../runners/direct/WatermarkManagerTest.java    |   18 +-
 .../direct/WindowEvaluatorFactoryTest.java      |  178 +-
 .../direct/WriteWithShardingFactoryTest.java    |  285 +
 runners/flink/README.md                         |   25 +-
 runners/flink/examples/pom.xml                  |   27 +-
 .../beam/runners/flink/examples/TFIDF.java      |   63 +-
 .../beam/runners/flink/examples/WordCount.java  |   12 +-
 .../flink/examples/streaming/AutoComplete.java  |   29 +-
 .../flink/examples/streaming/JoinExamples.java  |   23 +-
 .../examples/streaming/KafkaIOExamples.java     |    4 +-
 .../KafkaWindowedWordCountExample.java          |    8 +-
 .../examples/streaming/WindowedWordCount.java   |    9 +-
 runners/flink/pom.xml                           |    8 +-
 runners/flink/runner/pom.xml                    |  130 +-
 .../FlinkPipelineExecutionEnvironment.java      |    6 +-
 .../apache/beam/runners/flink/FlinkRunner.java  |    2 +-
 .../beam/runners/flink/FlinkRunnerResult.java   |   22 +-
 .../FlinkBatchTransformTranslators.java         |   31 +-
 .../FlinkStreamingTransformTranslators.java     |   22 +-
 .../FlinkStreamingTranslationContext.java       |    8 +-
 .../functions/FlinkAssignContext.java           |   15 +-
 .../functions/FlinkDoFnFunction.java            |   10 +-
 .../FlinkMergingNonShuffleReduceFunction.java   |    8 +-
 .../functions/FlinkMultiOutputDoFnFunction.java |   10 +-
 .../FlinkMultiOutputProcessContext.java         |    6 +-
 .../functions/FlinkNoElementAssignContext.java  |   12 +-
 .../functions/FlinkPartialReduceFunction.java   |    8 +-
 .../functions/FlinkProcessContext.java          |   41 +-
 .../functions/FlinkReduceFunction.java          |    8 +-
 .../translation/types/CoderTypeInformation.java |    6 +-
 .../utils/SerializedPipelineOptions.java        |    8 +-
 .../streaming/FlinkAbstractParDoWrapper.java    |   34 +-
 .../FlinkGroupAlsoByWindowWrapper.java          |   64 +-
 .../streaming/FlinkParDoBoundMultiWrapper.java  |   12 +-
 .../streaming/FlinkParDoBoundWrapper.java       |    4 +-
 .../streaming/io/UnboundedFlinkSink.java        |    2 +-
 .../streaming/io/UnboundedFlinkSource.java      |   30 +-
 .../state/AbstractFlinkTimerInternals.java      |    4 +-
 .../streaming/state/FlinkStateInternals.java    |    7 +-
 .../beam/runners/flink/PipelineOptionsTest.java |    6 +-
 .../beam/runners/flink/ReadSourceITCase.java    |    4 +-
 .../flink/ReadSourceStreamingITCase.java        |    4 +-
 .../beam/runners/flink/WriteSinkITCase.java     |   14 +-
 .../flink/streaming/GroupAlsoByWindowTest.java  |    4 +-
 .../flink/streaming/GroupByNullKeyTest.java     |    8 +-
 .../flink/streaming/StateSerializationTest.java |   20 +
 .../streaming/TopWikipediaSessionsITCase.java   |    6 +-
 runners/google-cloud-dataflow-java/pom.xml      |   78 +-
 .../dataflow/BlockingDataflowRunner.java        |    8 +-
 .../runners/dataflow/DataflowPipelineJob.java   |   90 +-
 .../dataflow/DataflowPipelineTranslator.java    |   21 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  511 +-
 .../dataflow/internal/AssignWindows.java        |    6 +-
 .../DataflowUnboundedReadFromBoundedSource.java |  547 ++
 .../runners/dataflow/internal/IsmFormat.java    |    8 +-
 .../BlockingDataflowPipelineOptions.java        |   27 -
 .../options/DataflowPipelineDebugOptions.java   |   43 -
 .../options/DataflowPipelineOptions.java        |   46 +-
 .../options/DataflowWorkerLoggingOptions.java   |   14 +-
 .../runners/dataflow/options/package-info.java  |   22 +
 .../beam/runners/dataflow/package-info.java     |   22 +
 .../dataflow/testing/TestDataflowRunner.java    |   23 +-
 .../runners/dataflow/testing/package-info.java  |   24 +
 .../dataflow/util/DataflowPathValidator.java    |  100 -
 .../beam/runners/dataflow/util/DoFnInfo.java    |   16 +-
 .../beam/runners/dataflow/util/GcsStager.java   |    5 +-
 .../runners/dataflow/util/MonitoringUtil.java   |   70 +-
 .../runners/dataflow/util/package-info.java     |   20 +
 .../util/GroupAlsoByWindowViaWindowSetDoFn.java |   40 -
 .../dataflow/BlockingDataflowRunnerTest.java    |    9 +-
 .../dataflow/DataflowPipelineJobTest.java       |   43 +-
 .../DataflowPipelineTranslatorTest.java         |   26 +-
 .../runners/dataflow/DataflowRunnerTest.java    |  144 +-
 .../dataflow/RecordingPipelineVisitor.java      |   46 +
 .../runners/dataflow/io/DataflowAvroIOTest.java |   69 -
 .../dataflow/io/DataflowBigQueryIOTest.java     |   94 -
 .../dataflow/io/DataflowDatastoreIOTest.java    |   63 -
 .../dataflow/io/DataflowPubsubIOTest.java       |   63 -
 .../runners/dataflow/io/DataflowTextIOTest.java |   76 -
 .../options/DataflowPipelineOptionsTest.java    |   65 +
 .../testing/TestDataflowRunnerTest.java         |   46 +-
 .../transforms/DataflowCombineTest.java         |   58 -
 .../DataflowDisplayDataEvaluator.java           |   72 -
 .../transforms/DataflowGroupByKeyTest.java      |    2 +-
 .../transforms/DataflowMapElementsTest.java     |   55 -
 .../dataflow/transforms/DataflowViewTest.java   |    4 +-
 .../util/DataflowPathValidatorTest.java         |   94 -
 .../dataflow/util/MonitoringUtilTest.java       |   60 +
 runners/pom.xml                                 |   74 +-
 runners/spark/README.md                         |    8 +-
 runners/spark/pom.xml                           |  102 +-
 .../runners/spark/SparkPipelineOptions.java     |   13 +-
 .../beam/runners/spark/SparkPipelineRunner.java |  255 -
 .../apache/beam/runners/spark/SparkRunner.java  |  249 +
 .../runners/spark/SparkRunnerRegistrar.java     |   14 +-
 .../spark/SparkStreamingPipelineOptions.java    |   41 -
 .../runners/spark/TestSparkPipelineRunner.java  |   77 -
 .../beam/runners/spark/TestSparkRunner.java     |   75 +
 .../runners/spark/aggregators/package-info.java |   20 +
 .../beam/runners/spark/coders/package-info.java |   22 +
 .../beam/runners/spark/examples/WordCount.java  |  137 +
 .../beam/runners/spark/io/CreateStream.java     |    7 +-
 .../apache/beam/runners/spark/io/KafkaIO.java   |   23 +-
 .../beam/runners/spark/io/hadoop/HadoopIO.java  |   38 +-
 .../runners/spark/io/hadoop/package-info.java   |   22 +
 .../beam/runners/spark/io/package-info.java     |   22 +
 .../apache/beam/runners/spark/package-info.java |   22 +
 .../runners/spark/translation/DoFnFunction.java |    8 +-
 .../spark/translation/EvaluationContext.java    |   21 +
 .../spark/translation/MultiDoFnFunction.java    |    8 +-
 .../spark/translation/SparkContextFactory.java  |    8 +-
 .../translation/SparkPipelineEvaluator.java     |    6 +-
 .../spark/translation/SparkProcessContext.java  |   18 +-
 .../spark/translation/TransformTranslator.java  |   73 +-
 .../runners/spark/translation/package-info.java |   22 +
 .../streaming/StreamingTransformTranslator.java |    4 +-
 .../StreamingWindowPipelineDetector.java        |    6 +-
 .../translation/streaming/package-info.java     |   22 +
 .../beam/runners/spark/util/package-info.java   |   22 +
 .../apache/beam/runners/spark/DeDupTest.java    |    4 +-
 .../beam/runners/spark/EmptyInputTest.java      |    4 +-
 .../beam/runners/spark/SimpleWordCountTest.java |   87 +-
 .../runners/spark/SparkRunnerRegistrarTest.java |    4 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  203 +-
 .../beam/runners/spark/io/AvroPipelineTest.java |    9 +-
 .../beam/runners/spark/io/NumShardsTest.java    |    8 +-
 .../io/hadoop/HadoopFileFormatPipelineTest.java |    9 +-
 .../spark/translation/CombineGloballyTest.java  |    6 +-
 .../spark/translation/CombinePerKeyTest.java    |   13 +-
 .../spark/translation/DoFnOutputTest.java       |   14 +-
 .../translation/MultiOutputWordCountTest.java   |   17 +-
 .../spark/translation/SerializationTest.java    |   16 +-
 .../spark/translation/SideEffectsTest.java      |   14 +-
 .../translation/TransformTranslatorTest.java    |   45 +-
 .../translation/WindowedWordCountTest.java      |   20 +-
 .../streaming/FlattenStreamingTest.java         |   14 +-
 .../streaming/KafkaStreamingTest.java           |   23 +-
 .../streaming/SimpleStreamingWordCountTest.java |   20 +-
 .../streaming/utils/EmbeddedKafkaCluster.java   |    4 +-
 runners/spark/src/test/resources/person.avsc    |   25 +-
 runners/spark/src/test/resources/pg1112.txt     | 4853 ------------------
 runners/spark/src/test/resources/pg2264.txt     | 3667 -------------
 sdks/java/build-tools/pom.xml                   |    2 +-
 .../src/main/resources/beam/checkstyle.xml      |   19 +-
 .../src/main/resources/beam/findbugs-filter.xml |  142 +
 .../src/main/resources/beam/suppressions.xml    |   24 +
 sdks/java/core/pom.xml                          |   61 +-
 .../main/java/org/apache/beam/sdk/Pipeline.java |    9 +-
 .../org/apache/beam/sdk/PipelineResult.java     |   38 +
 .../org/apache/beam/sdk/coders/AvroCoder.java   |   21 +-
 .../java/org/apache/beam/sdk/coders/Coder.java  |   12 +-
 .../apache/beam/sdk/coders/CoderRegistry.java   |   43 +-
 .../apache/beam/sdk/coders/CollectionCoder.java |    7 +-
 .../apache/beam/sdk/coders/DelegateCoder.java   |   26 +-
 .../apache/beam/sdk/coders/DurationCoder.java   |    1 -
 .../org/apache/beam/sdk/coders/EntityCoder.java |   87 -
 .../apache/beam/sdk/coders/InstantCoder.java    |   34 +-
 .../apache/beam/sdk/coders/IterableCoder.java   |    7 +-
 .../beam/sdk/coders/IterableLikeCoder.java      |   10 +-
 .../org/apache/beam/sdk/coders/JAXBCoder.java   |   28 +-
 .../org/apache/beam/sdk/coders/KvCoder.java     |    7 +-
 .../org/apache/beam/sdk/coders/ListCoder.java   |    7 +-
 .../org/apache/beam/sdk/coders/MapCoder.java    |    6 +-
 .../apache/beam/sdk/coders/NullableCoder.java   |    9 +-
 .../beam/sdk/coders/SerializableCoder.java      |    6 +-
 .../org/apache/beam/sdk/coders/SetCoder.java    |    7 +-
 .../beam/sdk/coders/StringDelegateCoder.java    |   51 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |   74 +-
 .../java/org/apache/beam/sdk/io/AvroSource.java |   14 +-
 .../java/org/apache/beam/sdk/io/BigQueryIO.java | 2493 ---------
 .../sdk/io/BoundedReadFromUnboundedSource.java  |    6 +-
 .../apache/beam/sdk/io/CompressedSource.java    |   81 +-
 .../org/apache/beam/sdk/io/DatastoreIO.java     |  988 ----
 .../org/apache/beam/sdk/io/FileBasedSink.java   |   22 +-
 .../apache/beam/sdk/io/OffsetBasedSource.java   |   44 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |   49 +-
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |   37 +-
 .../beam/sdk/io/PubsubUnboundedSource.java      |    8 +-
 .../main/java/org/apache/beam/sdk/io/Read.java  |   29 +-
 .../java/org/apache/beam/sdk/io/Source.java     |    2 +-
 .../java/org/apache/beam/sdk/io/TextIO.java     |   87 +-
 .../org/apache/beam/sdk/io/UnboundedSource.java |   28 +-
 .../main/java/org/apache/beam/sdk/io/Write.java |  332 +-
 .../java/org/apache/beam/sdk/io/XmlSink.java    |   10 +-
 .../java/org/apache/beam/sdk/io/XmlSource.java  |   11 +-
 .../org/apache/beam/sdk/io/package-info.java    |    9 +-
 .../beam/sdk/io/range/ByteKeyRangeTracker.java  |   63 +-
 .../beam/sdk/io/range/OffsetRangeTracker.java   |   13 +-
 .../org/apache/beam/sdk/options/GcpOptions.java |   37 +-
 .../org/apache/beam/sdk/options/GcsOptions.java |   44 +
 .../beam/sdk/options/PipelineOptions.java       |    3 +-
 .../sdk/options/PipelineOptionsFactory.java     |   41 +-
 .../sdk/options/PipelineOptionsReflector.java   |    1 +
 .../sdk/options/PipelineOptionsValidator.java   |   18 +-
 .../sdk/options/ProxyInvocationHandler.java     |   15 +-
 .../beam/sdk/runners/AggregatorValues.java      |    4 +-
 .../apache/beam/sdk/runners/PipelineRunner.java |    6 +-
 .../sdk/runners/RecordingPipelineVisitor.java   |   47 -
 .../beam/sdk/runners/TransformHierarchy.java    |   10 +-
 .../beam/sdk/runners/TransformTreeNode.java     |   13 +-
 .../beam/sdk/testing/CoderProperties.java       |   85 +-
 .../org/apache/beam/sdk/testing/PAssert.java    |  532 +-
 .../apache/beam/sdk/testing/PaneExtractors.java |  140 +
 .../beam/sdk/testing/SerializableMatchers.java  |   27 +-
 .../apache/beam/sdk/testing/StaticWindows.java  |  110 +
 .../apache/beam/sdk/testing/TestPipeline.java   |    4 +-
 .../beam/sdk/testing/TestPipelineOptions.java   |    1 +
 .../beam/sdk/testing/WindowFnTestUtils.java     |    5 +-
 .../apache/beam/sdk/testing/WindowSupplier.java |   83 +
 .../apache/beam/sdk/transforms/Aggregator.java  |   32 +-
 .../sdk/transforms/AggregatorRetriever.java     |    6 +-
 .../sdk/transforms/ApproximateQuantiles.java    |    9 +-
 .../org/apache/beam/sdk/transforms/Combine.java |  238 +-
 .../apache/beam/sdk/transforms/CombineFns.java  |   10 +-
 .../org/apache/beam/sdk/transforms/Count.java   |    4 +-
 .../org/apache/beam/sdk/transforms/Create.java  |    7 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |  418 +-
 .../beam/sdk/transforms/DoFnReflector.java      |  807 ++-
 .../apache/beam/sdk/transforms/DoFnTester.java  |  202 +-
 .../beam/sdk/transforms/DoFnWithContext.java    |  429 --
 .../org/apache/beam/sdk/transforms/Filter.java  |    5 +-
 .../beam/sdk/transforms/FlatMapElements.java    |  126 +-
 .../org/apache/beam/sdk/transforms/Flatten.java |    4 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |    2 +-
 .../transforms/IntraBundleParallelization.java  |   49 +-
 .../org/apache/beam/sdk/transforms/Keys.java    |   13 +-
 .../org/apache/beam/sdk/transforms/KvSwap.java  |   15 +-
 .../apache/beam/sdk/transforms/MapElements.java |   60 +-
 .../org/apache/beam/sdk/transforms/Max.java     |   42 +-
 .../org/apache/beam/sdk/transforms/Min.java     |   45 +-
 .../org/apache/beam/sdk/transforms/OldDoFn.java |  567 ++
 .../apache/beam/sdk/transforms/PTransform.java  |   29 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   |  287 +-
 .../apache/beam/sdk/transforms/Partition.java   |    2 +-
 .../beam/sdk/transforms/RemoveDuplicates.java   |   13 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |   10 +-
 .../beam/sdk/transforms/SimpleFunction.java     |   42 +-
 .../org/apache/beam/sdk/transforms/Top.java     |    7 +-
 .../org/apache/beam/sdk/transforms/Values.java  |   15 +-
 .../apache/beam/sdk/transforms/WithKeys.java    |   15 +-
 .../beam/sdk/transforms/WithTimestamps.java     |    4 +-
 .../sdk/transforms/display/DisplayData.java     |    7 +-
 .../beam/sdk/transforms/join/CoGbkResult.java   |    9 +-
 .../beam/sdk/transforms/join/CoGroupByKey.java  |   16 +-
 .../beam/sdk/transforms/windowing/AfterAll.java |    6 +-
 .../windowing/AfterDelayFromFirstElement.java   |    6 +
 .../sdk/transforms/windowing/AfterEach.java     |    1 +
 .../sdk/transforms/windowing/AfterFirst.java    |    9 +-
 .../sdk/transforms/windowing/AfterPane.java     |    4 +
 .../windowing/AfterProcessingTime.java          |    6 +-
 .../transforms/windowing/AfterWatermark.java    |   38 +-
 .../sdk/transforms/windowing/GlobalWindows.java |    5 -
 .../transforms/windowing/IntervalWindow.java    |    1 -
 .../beam/sdk/transforms/windowing/Never.java    |    1 +
 .../beam/sdk/transforms/windowing/PaneInfo.java |   27 +-
 .../windowing/PartitioningWindowFn.java         |    5 -
 .../beam/sdk/transforms/windowing/Trigger.java  |    7 +-
 .../transforms/windowing/TriggerBuilder.java    |   29 -
 .../beam/sdk/transforms/windowing/Window.java   |   97 +-
 .../beam/sdk/transforms/windowing/WindowFn.java |   11 +-
 .../org/apache/beam/sdk/util/AssignWindows.java |   46 -
 .../apache/beam/sdk/util/AssignWindowsDoFn.java |   75 -
 ...AttemptAndTimeBoundedExponentialBackOff.java |   15 +-
 .../util/AttemptBoundedExponentialBackOff.java  |    9 +-
 .../org/apache/beam/sdk/util/AvroUtils.java     |  207 -
 .../beam/sdk/util/BaseExecutionContext.java     |    4 +-
 .../beam/sdk/util/BatchTimerInternals.java      |  140 -
 .../apache/beam/sdk/util/BigQueryServices.java  |  165 -
 .../beam/sdk/util/BigQueryServicesImpl.java     |  478 --
 .../beam/sdk/util/BigQueryTableInserter.java    |  459 --
 .../beam/sdk/util/BigQueryTableRowIterator.java |  472 --
 .../apache/beam/sdk/util/BucketingFunction.java |    1 +
 .../org/apache/beam/sdk/util/CoderUtils.java    |    4 +-
 .../beam/sdk/util/CombineContextFactory.java    |    6 +-
 .../apache/beam/sdk/util/CounterAggregator.java |   35 +-
 .../org/apache/beam/sdk/util/Credentials.java   |    5 +-
 .../org/apache/beam/sdk/util/DoFnRunner.java    |   62 -
 .../apache/beam/sdk/util/DoFnRunnerBase.java    |  558 --
 .../org/apache/beam/sdk/util/DoFnRunners.java   |  144 -
 .../apache/beam/sdk/util/ExecutableTrigger.java |   11 +-
 .../apache/beam/sdk/util/ExecutionContext.java  |    8 +-
 .../sdk/util/ExposedByteArrayInputStream.java   |    3 +
 .../sdk/util/ExposedByteArrayOutputStream.java  |    4 +
 .../beam/sdk/util/FileIOChannelFactory.java     |   34 +-
 .../apache/beam/sdk/util/GatherAllPanes.java    |   16 +-
 .../apache/beam/sdk/util/GcsPathValidator.java  |   97 +
 .../java/org/apache/beam/sdk/util/GcsUtil.java  |   10 +-
 .../beam/sdk/util/GroupAlsoByWindowsDoFn.java   |   59 -
 .../GroupAlsoByWindowsViaOutputBufferDoFn.java  |  100 -
 .../sdk/util/GroupByKeyViaGroupByKeyOnly.java   |  247 -
 .../apache/beam/sdk/util/IOChannelUtils.java    |   26 +-
 .../apache/beam/sdk/util/IdentityWindowFn.java  |   20 +-
 .../apache/beam/sdk/util/InstanceBuilder.java   |   21 +-
 .../util/IntervalBoundedExponentialBackOff.java |   10 +-
 .../sdk/util/LateDataDroppingDoFnRunner.java    |  147 -
 .../beam/sdk/util/MergingActiveWindowSet.java   |   35 +-
 .../apache/beam/sdk/util/MovingFunction.java    |    1 +
 .../org/apache/beam/sdk/util/NonEmptyPanes.java |  150 -
 .../apache/beam/sdk/util/PCollectionViews.java  |    4 +-
 .../apache/beam/sdk/util/PaneInfoTracker.java   |  154 -
 .../beam/sdk/util/PerKeyCombineFnRunner.java    |   44 +-
 .../beam/sdk/util/PerKeyCombineFnRunners.java   |   30 +-
 .../org/apache/beam/sdk/util/PubsubClient.java  |    1 +
 .../apache/beam/sdk/util/PubsubTestClient.java  |    1 +
 .../sdk/util/PushbackSideInputDoFnRunner.java   |  115 -
 .../java/org/apache/beam/sdk/util/ReduceFn.java |  130 -
 .../beam/sdk/util/ReduceFnContextFactory.java   |  497 --
 .../apache/beam/sdk/util/ReduceFnRunner.java    |  985 ----
 .../sdk/util/ReifyTimestampAndWindowsDoFn.java  |    6 +-
 .../sdk/util/ReifyTimestampsAndWindows.java     |   63 +
 .../org/apache/beam/sdk/util/ReleaseInfo.java   |   12 +-
 .../org/apache/beam/sdk/util/Reshuffle.java     |   10 +-
 .../apache/beam/sdk/util/SerializableUtils.java |   15 +-
 .../apache/beam/sdk/util/SimpleDoFnRunner.java  |   56 -
 .../org/apache/beam/sdk/util/StringUtils.java   |   14 +-
 .../beam/sdk/util/SystemDoFnInternal.java       |    6 +-
 .../apache/beam/sdk/util/SystemReduceFn.java    |  135 -
 .../org/apache/beam/sdk/util/TimeDomain.java    |    2 +-
 .../apache/beam/sdk/util/TimerInternals.java    |    6 +-
 .../beam/sdk/util/TriggerContextFactory.java    |   16 +-
 .../org/apache/beam/sdk/util/TriggerRunner.java |  234 -
 .../apache/beam/sdk/util/ValueWithRecordId.java |   27 +-
 .../org/apache/beam/sdk/util/WatermarkHold.java |  536 --
 .../org/apache/beam/sdk/util/WindowedValue.java |  205 +-
 .../beam/sdk/util/WindowingInternals.java       |    4 +-
 .../apache/beam/sdk/util/WindowingStrategy.java |    6 +-
 .../java/org/apache/beam/sdk/util/ZipFiles.java |   11 +
 .../apache/beam/sdk/util/common/Counter.java    |    2 +-
 .../util/common/ElementByteSizeObservable.java  |   42 -
 .../util/common/ElementByteSizeObserver.java    |   18 +-
 .../beam/sdk/util/common/PeekingReiterator.java |   99 -
 .../beam/sdk/util/common/ReflectHelpers.java    |   23 +-
 .../beam/sdk/util/common/package-info.java      |    3 +-
 .../org/apache/beam/sdk/util/gcsfs/GcsPath.java |   51 +-
 .../beam/sdk/util/gcsfs/package-info.java       |    3 +-
 .../org/apache/beam/sdk/util/package-info.java  |    3 +-
 .../CopyOnAccessInMemoryStateInternals.java     |    4 +-
 .../sdk/util/state/StateInternalsFactory.java   |   36 +
 .../beam/sdk/util/state/StateMerging.java       |   24 +-
 .../beam/sdk/util/state/StateNamespaces.java    |    2 +-
 .../apache/beam/sdk/util/state/StateTable.java  |   12 +-
 .../apache/beam/sdk/util/state/StateTags.java   |    6 +-
 .../beam/sdk/util/state/package-info.java       |   22 +
 .../org/apache/beam/sdk/values/PCollection.java |   34 +-
 .../apache/beam/sdk/values/PCollectionList.java |    2 +-
 .../org/apache/beam/sdk/values/POutput.java     |    2 +-
 .../beam/sdk/values/TimestampedValue.java       |    1 -
 .../org/apache/beam/sdk/values/TupleTag.java    |    2 +-
 .../apache/beam/sdk/values/TypeDescriptors.java |   13 +
 .../dataflow/util/GcsPathValidatorTest.java     |  104 +
 .../java/org/apache/beam/sdk/PipelineTest.java  |    2 +-
 .../org/apache/beam/sdk/WindowMatchers.java     |   80 +-
 .../org/apache/beam/sdk/WindowMatchersTest.java |   84 +
 .../apache/beam/sdk/coders/AvroCoderTest.java   |   54 +-
 .../beam/sdk/coders/BigDecimalCoderTest.java    |   15 +-
 .../beam/sdk/coders/BigIntegerCoderTest.java    |   14 +-
 .../beam/sdk/coders/ByteArrayCoderTest.java     |    5 +-
 .../beam/sdk/coders/CoderRegistryTest.java      |   22 +-
 .../beam/sdk/coders/DelegateCoderTest.java      |   43 +
 .../apache/beam/sdk/coders/EntityCoderTest.java |  110 -
 .../apache/beam/sdk/coders/JAXBCoderTest.java   |   69 +-
 .../beam/sdk/coders/NullableCoderTest.java      |   26 +-
 .../beam/sdk/coders/SerializableCoderTest.java  |   15 +-
 .../sdk/coders/protobuf/ProtobufUtilTest.java   |    7 +-
 .../beam/sdk/io/AvroIOGeneratedClassTest.java   |  192 +-
 .../java/org/apache/beam/sdk/io/AvroIOTest.java |   53 +-
 .../org/apache/beam/sdk/io/AvroSourceTest.java  |   20 +-
 .../org/apache/beam/sdk/io/BigQueryIOTest.java  | 1128 ----
 .../io/BoundedReadFromUnboundedSourceTest.java  |    1 +
 .../beam/sdk/io/CompressedSourceTest.java       |   86 +
 .../apache/beam/sdk/io/CountingInputTest.java   |    3 +-
 .../apache/beam/sdk/io/CountingSourceTest.java  |    2 +-
 .../org/apache/beam/sdk/io/DatastoreIOTest.java |  621 ---
 .../apache/beam/sdk/io/FileBasedSourceTest.java |    5 +-
 .../beam/sdk/io/OffsetBasedSourceTest.java      |    6 +-
 .../org/apache/beam/sdk/io/PubsubIOTest.java    |   34 +-
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |    2 +-
 .../java/org/apache/beam/sdk/io/ReadTest.java   |    1 +
 .../java/org/apache/beam/sdk/io/TextIOTest.java |  279 +-
 .../java/org/apache/beam/sdk/io/WriteTest.java  |  152 +-
 .../org/apache/beam/sdk/io/XmlSinkTest.java     |    1 +
 .../org/apache/beam/sdk/io/XmlSourceTest.java   |   19 +-
 .../sdk/io/range/ByteKeyRangeTrackerTest.java   |   58 +-
 .../sdk/io/range/OffsetRangeTrackerTest.java    |   91 +-
 .../apache/beam/sdk/options/GcpOptionsTest.java |   26 +
 .../sdk/options/GoogleApiDebugOptionsTest.java  |    1 -
 .../sdk/options/PipelineOptionsFactoryTest.java |    1 -
 .../beam/sdk/options/PipelineOptionsTest.java   |    1 -
 .../sdk/options/ProxyInvocationHandlerTest.java |    2 +-
 .../AggregatorPipelineExtractorTest.java        |    6 +-
 .../beam/sdk/runners/TransformTreeTest.java     |    6 +-
 .../beam/sdk/testing/CoderPropertiesTest.java   |   26 +
 .../sdk/testing/DataflowJUnitTestRunner.java    |  130 -
 .../apache/beam/sdk/testing/PAssertTest.java    |  117 +-
 .../beam/sdk/testing/PaneExtractorsTest.java    |  323 ++
 .../beam/sdk/testing/StaticWindowsTest.java     |   94 +
 .../beam/sdk/testing/TestPipelineTest.java      |    1 -
 .../beam/sdk/testing/WindowSupplierTest.java    |   89 +
 .../transforms/ApproximateQuantilesTest.java    |    1 +
 .../sdk/transforms/ApproximateUniqueTest.java   |    3 +-
 .../beam/sdk/transforms/CombineFnsTest.java     |    4 +-
 .../apache/beam/sdk/transforms/CombineTest.java |   46 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |    6 +-
 .../beam/sdk/transforms/DoFnContextTest.java    |   69 -
 .../DoFnDelegatingAggregatorTest.java           |   16 +-
 .../beam/sdk/transforms/DoFnReflectorTest.java  |  340 +-
 .../apache/beam/sdk/transforms/DoFnTest.java    |   96 +-
 .../beam/sdk/transforms/DoFnTesterTest.java     |   78 +-
 .../sdk/transforms/DoFnWithContextTest.java     |  237 -
 .../sdk/transforms/FlatMapElementsTest.java     |   48 +
 .../apache/beam/sdk/transforms/FlattenTest.java |    4 +-
 .../beam/sdk/transforms/GroupByKeyTest.java     |    6 +-
 .../IntraBundleParallelizationTest.java         |   23 +-
 .../beam/sdk/transforms/MapElementsTest.java    |  107 +
 .../org/apache/beam/sdk/transforms/MaxTest.java |    1 +
 .../org/apache/beam/sdk/transforms/MinTest.java |    2 +
 .../apache/beam/sdk/transforms/NoOpDoFn.java    |  144 -
 .../apache/beam/sdk/transforms/NoOpOldDoFn.java |  144 +
 .../beam/sdk/transforms/OldDoFnContextTest.java |   69 +
 .../apache/beam/sdk/transforms/OldDoFnTest.java |  242 +
 .../beam/sdk/transforms/PTransformTest.java     |    7 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   |  136 +-
 .../beam/sdk/transforms/PartitionTest.java      |    1 +
 .../apache/beam/sdk/transforms/SampleTest.java  |    7 +-
 .../org/apache/beam/sdk/transforms/SumTest.java |   33 +
 .../org/apache/beam/sdk/transforms/TopTest.java |    1 +
 .../apache/beam/sdk/transforms/ViewTest.java    |  400 +-
 .../beam/sdk/transforms/WithTimestampsTest.java |    4 +-
 .../display/DisplayDataEvaluator.java           |   41 +-
 .../display/DisplayDataEvaluatorTest.java       |   18 +-
 .../transforms/display/DisplayDataMatchers.java |   22 +
 .../display/DisplayDataMatchersTest.java        |   17 +-
 .../sdk/transforms/display/DisplayDataTest.java |   21 +-
 .../dofnreflector/DoFnReflectorTestHelper.java  |  116 +
 .../sdk/transforms/join/CoGroupByKeyTest.java   |   26 +-
 .../windowing/AfterProcessingTimeTest.java      |    3 +-
 .../windowing/AfterWatermarkTest.java           |   24 +-
 .../sdk/transforms/windowing/NeverTest.java     |    1 +
 .../sdk/transforms/windowing/WindowTest.java    |   10 +-
 .../sdk/transforms/windowing/WindowingTest.java |   27 +-
 .../apache/beam/sdk/util/ApiSurfaceTest.java    |    4 +-
 .../org/apache/beam/sdk/util/AvroUtilsTest.java |   78 -
 .../beam/sdk/util/BatchTimerInternalsTest.java  |  118 -
 .../beam/sdk/util/BigQueryServicesImplTest.java |  303 --
 .../sdk/util/BigQueryTableInserterTest.java     |  306 --
 .../sdk/util/BigQueryTableRowIteratorTest.java  |  256 -
 .../apache/beam/sdk/util/BigQueryUtilTest.java  |  481 --
 .../beam/sdk/util/BucketingFunctionTest.java    |    4 +-
 .../beam/sdk/util/FileIOChannelFactoryTest.java |   26 +-
 .../beam/sdk/util/GatherAllPanesTest.java       |   16 +-
 .../sdk/util/GroupAlsoByWindowsProperties.java  |  619 ---
 ...oupAlsoByWindowsViaOutputBufferDoFnTest.java |  106 -
 .../beam/sdk/util/IOChannelUtilsTest.java       |   18 +-
 .../sdk/util/IdentitySideInputWindowFn.java     |    3 +-
 .../util/LateDataDroppingDoFnRunnerTest.java    |  117 -
 .../sdk/util/MergingActiveWindowSetTest.java    |    6 +-
 .../beam/sdk/util/MovingFunctionTest.java       |    4 +-
 .../util/PushbackSideInputDoFnRunnerTest.java   |  234 -
 .../beam/sdk/util/ReduceFnRunnerTest.java       | 1448 ------
 .../apache/beam/sdk/util/ReduceFnTester.java    |  784 ---
 .../util/RetryHttpRequestInitializerTest.java   |   19 +-
 .../beam/sdk/util/SerializableUtilsTest.java    |    1 -
 .../apache/beam/sdk/util/SerializerTest.java    |    1 -
 .../beam/sdk/util/SimpleDoFnRunnerTest.java     |   86 -
 .../apache/beam/sdk/util/StringUtilsTest.java   |   35 +-
 .../org/apache/beam/sdk/util/TriggerTester.java |   31 +-
 .../beam/sdk/util/common/CounterTest.java       |    1 +
 .../beam/sdk/util/common/CounterTestUtils.java  |   57 -
 .../beam/sdk/util/state/StateTagTest.java       |    5 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |    2 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   20 +-
 sdks/java/extensions/join-library/pom.xml       |   11 +-
 .../beam/sdk/extensions/joinlibrary/Join.java   |   25 +-
 .../extensions/joinlibrary/package-info.java    |   23 +
 sdks/java/extensions/pom.xml                    |    2 +-
 sdks/java/io/google-cloud-platform/pom.xml      |  151 +-
 .../sdk/io/gcp/bigquery/BigQueryAvroUtils.java  |  236 +
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 2750 ++++++++++
 .../sdk/io/gcp/bigquery/BigQueryServices.java   |  191 +
 .../io/gcp/bigquery/BigQueryServicesImpl.java   |  722 +++
 .../io/gcp/bigquery/BigQueryTableInserter.java  |  221 +
 .../gcp/bigquery/BigQueryTableRowIterator.java  |  474 ++
 .../beam/sdk/io/gcp/bigquery/package-info.java  |   24 +
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    |  397 +-
 .../sdk/io/gcp/bigtable/BigtableService.java    |   10 +-
 .../io/gcp/bigtable/BigtableServiceImpl.java    |   68 +-
 .../io/gcp/bigtable/BigtableTestOptions.java    |   37 +
 .../beam/sdk/io/gcp/datastore/DatastoreIO.java  |   41 +
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      |  969 ++++
 .../beam/sdk/io/gcp/datastore/package-info.java |   24 +
 .../io/gcp/bigquery/BigQueryAvroUtilsTest.java  |  143 +
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 1438 ++++++
 .../gcp/bigquery/BigQueryServicesImplTest.java  |  359 ++
 .../gcp/bigquery/BigQueryTableInserterTest.java |  249 +
 .../bigquery/BigQueryTableRowIteratorTest.java  |  256 +
 .../sdk/io/gcp/bigquery/BigQueryUtilTest.java   |  454 ++
 .../sdk/io/gcp/bigtable/BigtableIOTest.java     |  170 +-
 .../sdk/io/gcp/bigtable/BigtableReadIT.java     |   60 +
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |  214 +
 .../sdk/io/gcp/datastore/V1Beta3ReadIT.java     |  114 +
 .../beam/sdk/io/gcp/datastore/V1Beta3Test.java  |  587 +++
 .../io/gcp/datastore/V1Beta3TestOptions.java    |   44 +
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |  382 ++
 .../sdk/io/gcp/datastore/V1Beta3WriteIT.java    |   85 +
 sdks/java/io/hdfs/pom.xml                       |   64 +-
 .../beam/sdk/io/hdfs/AvroHDFSFileSource.java    |  145 +
 .../beam/sdk/io/hdfs/AvroWrapperCoder.java      |  116 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSink.java   |  277 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSource.java |   63 +-
 .../apache/beam/sdk/io/hdfs/WritableCoder.java  |    9 +-
 .../apache/beam/sdk/io/hdfs/package-info.java   |   22 +
 .../SimpleAuthAvroHDFSFileSource.java           |   84 +
 .../hdfs/simpleauth/SimpleAuthHDFSFileSink.java |  132 +
 .../simpleauth/SimpleAuthHDFSFileSource.java    |  122 +
 .../sdk/io/hdfs/simpleauth/package-info.java    |   22 +
 .../beam/sdk/io/hdfs/AvroWrapperCoderTest.java  |   52 +
 .../beam/sdk/io/hdfs/WritableCoderTest.java     |    9 +
 sdks/java/io/jms/pom.xml                        |  134 +
 .../beam/sdk/io/jms/JmsCheckpointMark.java      |   82 +
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |  516 ++
 .../org/apache/beam/sdk/io/jms/JmsRecord.java   |  153 +
 .../apache/beam/sdk/io/jms/package-info.java    |   22 +
 .../org/apache/beam/sdk/io/jms/JmsIOTest.java   |  145 +
 sdks/java/io/kafka/pom.xml                      |   36 +-
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |   24 +-
 .../apache/beam/sdk/io/kafka/package-info.java  |   22 +
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   |   16 +-
 sdks/java/io/pom.xml                            |    3 +-
 sdks/java/java8tests/pom.xml                    |   17 +-
 .../transforms/RemoveDuplicatesJava8Test.java   |   10 +-
 .../beam/sdk/transforms/WithKeysJava8Test.java  |   11 +-
 .../sdk/transforms/WithTimestampsJava8Test.java |    4 +-
 sdks/java/maven-archetypes/examples/pom.xml     |    2 +-
 .../main/resources/archetype-resources/pom.xml  |    7 +
 .../src/main/java/DebuggingWordCount.java       |    6 +-
 .../src/main/java/MinimalWordCount.java         |    6 +-
 .../src/main/java/WindowedWordCount.java        |    8 +-
 .../src/main/java/WordCount.java                |   10 +-
 .../main/java/common/DataflowExampleUtils.java  |    5 +-
 .../main/java/common/PubsubFileInjector.java    |    4 +-
 .../projects/basic/archetype.properties         |    2 +-
 sdks/java/maven-archetypes/pom.xml              |    2 +-
 sdks/java/maven-archetypes/starter/pom.xml      |    8 +-
 .../src/main/java/StarterPipeline.java          |    6 +-
 .../projects/basic/archetype.properties         |    2 +-
 .../resources/projects/basic/reference/pom.xml  |    2 +-
 .../src/main/java/it/pkg/StarterPipeline.java   |    6 +-
 sdks/java/microbenchmarks/README.md             |   42 +
 sdks/java/microbenchmarks/pom.xml               |  110 +
 .../coders/AvroCoderBenchmark.java              |  121 +
 .../coders/ByteArrayCoderBenchmark.java         |   66 +
 .../coders/CoderBenchmarking.java               |   42 +
 .../coders/StringUtf8CoderBenchmark.java        |   72 +
 .../microbenchmarks/coders/package-info.java    |   22 +
 .../transforms/DoFnReflectorBenchmark.java      |  243 +
 .../transforms/package-info.java                |   22 +
 sdks/java/pom.xml                               |    3 +-
 sdks/pom.xml                                    |    2 +-
 699 files changed, 38013 insertions(+), 35273 deletions(-)
----------------------------------------------------------------------




[02/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
index 3b314b2..8b00c03 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
@@ -142,9 +142,9 @@ public class DoFnWithContextTest implements Serializable {
   @Test
   public void testDoFnWithContextUsingAggregators() {
     NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
-    DoFn<Object, Object>.Context context = noOpFn.context();
+    OldDoFn<Object, Object>.Context context = noOpFn.context();
 
-    DoFn<Object, Object> fn = spy(noOpFn);
+    OldDoFn<Object, Object> fn = spy(noOpFn);
     context = spy(context);
 
     @SuppressWarnings("unchecked")
@@ -225,7 +225,7 @@ public class DoFnWithContextTest implements Serializable {
   }
 
   /**
-   * Initialize a test pipeline with the specified {@link DoFn}.
+   * Initialize a test pipeline with the specified {@link OldDoFn}.
    */
   private <InputT, OutputT> TestPipeline createTestPipeline(DoFnWithContext<InputT, OutputT> fn) {
     TestPipeline pipeline = TestPipeline.create();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
index 80825cb..b81eedb 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
@@ -130,7 +130,7 @@ public class FlattenTest implements Serializable {
 
     PCollection<String> output = p
         .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
-        .apply(ParDo.withSideInputs(view).of(new DoFn<Void, String>() {
+        .apply(ParDo.withSideInputs(view).of(new OldDoFn<Void, String>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     for (String side : c.sideInput(view)) {
@@ -339,7 +339,7 @@ public class FlattenTest implements Serializable {
 
   /////////////////////////////////////////////////////////////////////////////
 
-  private static class IdentityFn<T> extends DoFn<T, T> {
+  private static class IdentityFn<T> extends OldDoFn<T, T> {
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
index d6e4589..15c3ba8 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.TestUtils.KvMatcher.isKv;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.hasItem;
 import static org.hamcrest.Matchers.empty;
@@ -55,7 +56,6 @@ import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.junit.Assert;
@@ -371,7 +371,7 @@ public class GroupByKeyTest {
     pipeline.run();
   }
 
-  private static class AssertTimestamp<K, V> extends DoFn<KV<K, V>, Void> {
+  private static class AssertTimestamp<K, V> extends OldDoFn<KV<K, V>, Void> {
     private final Instant timestamp;
 
     public AssertTimestamp(Instant timestamp) {
@@ -506,7 +506,7 @@ public class GroupByKeyTest {
    * Creates a KV that wraps the original KV together with a random key.
    */
   static class AssignRandomKey
-      extends DoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
+      extends OldDoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
 
     @Override
     public void processElement(ProcessContext c) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
index 3355aeb..fa2fae9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
@@ -20,6 +20,7 @@ package org.apache.beam.sdk.transforms;
 import static org.apache.beam.sdk.testing.SystemNanoTimeSleeper.sleepMillis;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.hamcrest.Matchers.both;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -74,7 +75,7 @@ public class IntraBundleParallelizationTest {
   /**
    * Introduces a delay in processing, then passes thru elements.
    */
-  private static class DelayFn<T> extends DoFn<T, T> {
+  private static class DelayFn<T> extends OldDoFn<T, T> {
     public static final long DELAY_MS = 25;
 
     @Override
@@ -94,7 +95,7 @@ public class IntraBundleParallelizationTest {
   /**
    * Throws an exception after some number of calls.
    */
-  private static class ExceptionThrowingFn<T> extends DoFn<T, T> {
+  private static class ExceptionThrowingFn<T> extends OldDoFn<T, T> {
     private ExceptionThrowingFn(int numSuccesses) {
       IntraBundleParallelizationTest.numSuccesses.set(numSuccesses);
     }
@@ -120,11 +121,11 @@ public class IntraBundleParallelizationTest {
   /**
    * Measures concurrency of the processElement method.
    */
-  private static class ConcurrencyMeasuringFn<T> extends DoFn<T, T> {
+  private static class ConcurrencyMeasuringFn<T> extends OldDoFn<T, T> {
     @Override
     public void processElement(ProcessContext c) {
       // Synchronize on the class to provide synchronous access irrespective of
-      // how this DoFn is called.
+      // how this OldDoFn is called.
       synchronized (ConcurrencyMeasuringFn.class) {
         concurrentElements++;
         if (concurrentElements > maxDownstreamConcurrency) {
@@ -154,8 +155,8 @@ public class IntraBundleParallelizationTest {
   }
 
   /**
-   * Test that the DoFn is parallelized up the the Max Parallelism factor within a bundle, but not
-   * greater than that amount.
+   * Test that the OldDoFn is parallelized up the the Max Parallelism factor within a bundle, but
+   * not greater than that amount.
    */
   @Test
   @Category(NeedsRunner.class)
@@ -224,7 +225,7 @@ public class IntraBundleParallelizationTest {
 
   @Test
   public void testDisplayData() {
-    DoFn<String, String> fn = new DoFn<String, String>() {
+    OldDoFn<String, String> fn = new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
       }
@@ -248,15 +249,15 @@ public class IntraBundleParallelizationTest {
   /**
    * Runs the provided doFn inside of an {@link IntraBundleParallelization} transform.
    *
-   * <p>This method assumes that the DoFn passed to it will call {@link #startConcurrentCall()}
+   * <p>This method assumes that the OldDoFn passed to it will call {@link #startConcurrentCall()}
    * before processing each elements and {@link #finishConcurrentCall()} after each element.
    *
    * @param numElements the size of the input
    * @param maxParallelism how many threads to execute in parallel
-   * @param doFn the DoFn to execute
-   * @return the maximum observed parallelism of the DoFn
+   * @param doFn the OldDoFn to execute
+   * @return the maximum observed parallelism of the OldDoFn
    */
-  private int run(int numElements, int maxParallelism, DoFn<Integer, Integer> doFn) {
+  private int run(int numElements, int maxParallelism, OldDoFn<Integer, Integer> doFn) {
     Pipeline pipeline = TestPipeline.create();
 
     ArrayList<Integer> data = new ArrayList<>(numElements);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
index f18504c..b4751d2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
 import static org.junit.Assert.assertThat;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
index 226255a..87fa554 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.TestUtils.checkCombineFn;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
index d7ec322..cd03a74 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
@@ -20,10 +20,12 @@ package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.TestUtils.checkCombineFn;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 
 import org.apache.beam.sdk.transforms.display.DisplayData;
+
 import com.google.common.collect.Lists;
 
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
index a389fac..5c43755 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
@@ -28,35 +28,35 @@ import org.apache.beam.sdk.values.TupleTag;
 import org.joda.time.Instant;
 
 /**
- * A {@link DoFn} that does nothing with provided elements. Used for testing
- * methods provided by the DoFn abstract class.
+ * A {@link OldDoFn} that does nothing with provided elements. Used for testing
+ * methods provided by the OldDoFn abstract class.
  *
  * @param <InputT> unused.
  * @param <OutputT> unused.
  */
-class NoOpDoFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
+class NoOpDoFn<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
   @Override
-  public void processElement(DoFn<InputT, OutputT>.ProcessContext c) throws Exception {
+  public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
   }
 
   /**
    * Returns a new NoOp Context.
    */
-  public DoFn<InputT, OutputT>.Context context() {
+  public OldDoFn<InputT, OutputT>.Context context() {
     return new NoOpDoFnContext();
   }
 
   /**
    * Returns a new NoOp Process Context.
    */
-  public DoFn<InputT, OutputT>.ProcessContext processContext() {
+  public OldDoFn<InputT, OutputT>.ProcessContext processContext() {
     return new NoOpDoFnProcessContext();
   }
 
   /**
-   * A {@link DoFn.Context} that does nothing and returns exclusively null.
+   * A {@link OldDoFn.Context} that does nothing and returns exclusively null.
    */
-  private class NoOpDoFnContext extends DoFn<InputT, OutputT>.Context {
+  private class NoOpDoFnContext extends OldDoFn<InputT, OutputT>.Context {
     @Override
     public PipelineOptions getPipelineOptions() {
       return null;
@@ -82,10 +82,10 @@ class NoOpDoFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
   }
 
   /**
-   * A {@link DoFn.ProcessContext} that does nothing and returns exclusively
+   * A {@link OldDoFn.ProcessContext} that does nothing and returns exclusively
    * null.
    */
-  private class NoOpDoFnProcessContext extends DoFn<InputT, OutputT>.ProcessContext {
+  private class NoOpDoFnProcessContext extends OldDoFn<InputT, OutputT>.ProcessContext {
     @Override
     public InputT element() {
       return null;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
new file mode 100644
index 0000000..9234ccb
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/**
+ * Tests for {@link OldDoFn.Context}.
+ */
+@RunWith(JUnit4.class)
+public class OldDoFnContextTest {
+
+  @Mock
+  private Aggregator<Long, Long> agg;
+
+  private OldDoFn<Object, Object> fn;
+  private OldDoFn<Object, Object>.Context context;
+
+  @Before
+  public void setup() {
+    MockitoAnnotations.initMocks(this);
+
+    // Need to be real objects to call the constructor, and to reference the
+    // outer instance of OldDoFn
+    NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
+    OldDoFn<Object, Object>.Context noOpContext = noOpFn.context();
+
+    fn = spy(noOpFn);
+    context = spy(noOpContext);
+  }
+
+  @Test
+  public void testSetupDelegateAggregatorsCreatesAndLinksDelegateAggregators() {
+    Sum.SumLongFn combiner = new Sum.SumLongFn();
+    Aggregator<Long, Long> delegateAggregator =
+        fn.createAggregator("test", combiner);
+
+    when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
+
+    context.setupDelegateAggregators();
+    delegateAggregator.addValue(1L);
+
+    verify(agg).addValue(1L);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
new file mode 100644
index 0000000..49f4366
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isA;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
+import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.runners.AggregatorValues;
+import org.apache.beam.sdk.testing.NeedsRunner;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
+import org.apache.beam.sdk.transforms.Sum.SumIntegerFn;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+
+import com.google.common.collect.ImmutableMap;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Tests for OldDoFn.
+ */
+@RunWith(JUnit4.class)
+public class OldDoFnTest implements Serializable {
+
+  @Rule
+  public transient ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void testCreateAggregatorWithCombinerSucceeds() {
+    String name = "testAggregator";
+    Sum.SumLongFn combiner = new Sum.SumLongFn();
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
+
+    assertEquals(name, aggregator.getName());
+    assertEquals(combiner, aggregator.getCombineFn());
+  }
+
+  @Test
+  public void testCreateAggregatorWithNullNameThrowsException() {
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("name cannot be null");
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    doFn.createAggregator(null, new Sum.SumLongFn());
+  }
+
+  @Test
+  public void testCreateAggregatorWithNullCombineFnThrowsException() {
+    CombineFn<Object, Object, Object> combiner = null;
+
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("combiner cannot be null");
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    doFn.createAggregator("testAggregator", combiner);
+  }
+
+  @Test
+  public void testCreateAggregatorWithNullSerializableFnThrowsException() {
+    SerializableFunction<Iterable<Object>, Object> combiner = null;
+
+    thrown.expect(NullPointerException.class);
+    thrown.expectMessage("combiner cannot be null");
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    doFn.createAggregator("testAggregator", combiner);
+  }
+
+  @Test
+  public void testCreateAggregatorWithSameNameThrowsException() {
+    String name = "testAggregator";
+    CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    doFn.createAggregator(name, combiner);
+
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Cannot create");
+    thrown.expectMessage(name);
+    thrown.expectMessage("already exists");
+
+    doFn.createAggregator(name, combiner);
+  }
+
+  @Test
+  public void testCreateAggregatorsWithDifferentNamesSucceeds() {
+    String nameOne = "testAggregator";
+    String nameTwo = "aggregatorPrime";
+    CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+    OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+    Aggregator<Double, Double> aggregatorOne =
+        doFn.createAggregator(nameOne, combiner);
+    Aggregator<Double, Double> aggregatorTwo =
+        doFn.createAggregator(nameTwo, combiner);
+
+    assertNotEquals(aggregatorOne, aggregatorTwo);
+  }
+
+  @Test
+  @Category(NeedsRunner.class)
+  public void testCreateAggregatorInStartBundleThrows() {
+    TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+      @Override
+      public void startBundle(OldDoFn<String, String>.Context c) throws Exception {
+        createAggregator("anyAggregate", new MaxIntegerFn());
+      }
+
+      @Override
+      public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {}
+    });
+
+    thrown.expect(PipelineExecutionException.class);
+    thrown.expectCause(isA(IllegalStateException.class));
+
+    p.run();
+  }
+
+  @Test
+  @Category(NeedsRunner.class)
+  public void testCreateAggregatorInProcessElementThrows() {
+    TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+      @Override
+      public void processElement(ProcessContext c) throws Exception {
+        createAggregator("anyAggregate", new MaxIntegerFn());
+      }
+    });
+
+    thrown.expect(PipelineExecutionException.class);
+    thrown.expectCause(isA(IllegalStateException.class));
+
+    p.run();
+  }
+
+  @Test
+  @Category(NeedsRunner.class)
+  public void testCreateAggregatorInFinishBundleThrows() {
+    TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+      @Override
+      public void finishBundle(OldDoFn<String, String>.Context c) throws Exception {
+        createAggregator("anyAggregate", new MaxIntegerFn());
+      }
+
+      @Override
+      public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {}
+    });
+
+    thrown.expect(PipelineExecutionException.class);
+    thrown.expectCause(isA(IllegalStateException.class));
+
+    p.run();
+  }
+
+  /**
+   * Initialize a test pipeline with the specified {@link OldDoFn}.
+   */
+  private <InputT, OutputT> TestPipeline createTestPipeline(OldDoFn<InputT, OutputT> fn) {
+    TestPipeline pipeline = TestPipeline.create();
+    pipeline.apply(Create.of((InputT) null))
+     .apply(ParDo.of(fn));
+
+    return pipeline;
+  }
+
+  @Test
+  public void testPopulateDisplayDataDefaultBehavior() {
+    OldDoFn<String, String> usesDefault =
+        new OldDoFn<String, String>() {
+          @Override
+          public void processElement(ProcessContext c) throws Exception {}
+        };
+
+    DisplayData data = DisplayData.from(usesDefault);
+    assertThat(data.items(), empty());
+  }
+
+  @Test
+  @Category(NeedsRunner.class)
+  public void testAggregators() throws Exception {
+    Pipeline pipeline = TestPipeline.create();
+
+    CountOddsFn countOdds = new CountOddsFn();
+    pipeline
+        .apply(Create.of(1, 3, 5, 7, 2, 4, 6, 8, 10, 12, 14, 20, 42, 68, 100))
+        .apply(ParDo.of(countOdds));
+    PipelineResult result = pipeline.run();
+
+    AggregatorValues<Integer> values = result.getAggregatorValues(countOdds.aggregator);
+    assertThat(values.getValuesAtSteps(),
+        equalTo((Map<String, Integer>) ImmutableMap.<String, Integer>of("ParDo(CountOdds)", 4)));
+  }
+
+  private static class CountOddsFn extends OldDoFn<Integer, Void> {
+    @Override
+    public void processElement(ProcessContext c) throws Exception {
+      if (c.element() % 2 == 1) {
+        aggregator.addValue(1);
+      }
+    }
+
+    Aggregator<Integer, Integer> aggregator =
+        createAggregator("odds", new SumIntegerFn());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index 868270c..0a6eab0 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -26,6 +26,7 @@ import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString;
 import static org.apache.beam.sdk.util.StringUtils.jsonStringToByteArray;
 
 import static com.google.common.base.Preconditions.checkNotNull;
+
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.equalTo;
@@ -43,7 +44,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.ParDo.Bound;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
@@ -59,7 +60,6 @@ import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TupleTagList;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.junit.Rule;
@@ -89,7 +89,9 @@ public class ParDoTest implements Serializable {
   @Rule
   public transient ExpectedException thrown = ExpectedException.none();
 
-  private static class PrintingDoFn extends DoFn<String, String> implements RequiresWindowAccess {
+  private static class PrintingOldDoFn extends OldDoFn<String, String> implements
+      RequiresWindowAccess {
+
     @Override
     public void processElement(ProcessContext c) {
       c.output(c.element() + ":" + c.timestamp().getMillis()
@@ -97,17 +99,17 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestDoFn extends DoFn<Integer, String> {
+  static class TestOldDoFn extends OldDoFn<Integer, String> {
     enum State { UNSTARTED, STARTED, PROCESSING, FINISHED }
     State state = State.UNSTARTED;
 
     final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
     final List<TupleTag<String>> sideOutputTupleTags = new ArrayList<>();
 
-    public TestDoFn() {
+    public TestOldDoFn() {
     }
 
-    public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
+    public TestOldDoFn(List<PCollectionView<Integer>> sideInputViews,
                     List<TupleTag<String>> sideOutputTupleTags) {
       this.sideInputViews.addAll(sideInputViews);
       this.sideOutputTupleTags.addAll(sideOutputTupleTags);
@@ -161,9 +163,9 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestNoOutputDoFn extends DoFn<Integer, String> {
+  static class TestNoOutputDoFn extends OldDoFn<Integer, String> {
     @Override
-    public void processElement(DoFn<Integer, String>.ProcessContext c) throws Exception {}
+    public void processElement(OldDoFn<Integer, String>.ProcessContext c) throws Exception {}
   }
 
   static class TestDoFnWithContext extends DoFnWithContext<Integer, String> {
@@ -229,7 +231,7 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestStartBatchErrorDoFn extends DoFn<Integer, String> {
+  static class TestStartBatchErrorDoFn extends OldDoFn<Integer, String> {
     @Override
     public void startBundle(Context c) {
       throw new RuntimeException("test error in initialize");
@@ -241,14 +243,14 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestProcessElementErrorDoFn extends DoFn<Integer, String> {
+  static class TestProcessElementErrorDoFn extends OldDoFn<Integer, String> {
     @Override
     public void processElement(ProcessContext c) {
       throw new RuntimeException("test error in process");
     }
   }
 
-  static class TestFinishBatchErrorDoFn extends DoFn<Integer, String> {
+  static class TestFinishBatchErrorDoFn extends OldDoFn<Integer, String> {
     @Override
     public void processElement(ProcessContext c) {
       // This has to be here.
@@ -260,13 +262,13 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  private static class StrangelyNamedDoer extends DoFn<Integer, String> {
+  private static class StrangelyNamedDoer extends OldDoFn<Integer, String> {
     @Override
     public void processElement(ProcessContext c) {
     }
   }
 
-  static class TestOutputTimestampDoFn extends DoFn<Integer, Integer> {
+  static class TestOutputTimestampDoFn extends OldDoFn<Integer, Integer> {
     @Override
     public void processElement(ProcessContext c) {
       Integer value = c.element();
@@ -274,7 +276,7 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestShiftTimestampDoFn extends DoFn<Integer, Integer> {
+  static class TestShiftTimestampDoFn extends OldDoFn<Integer, Integer> {
     private Duration allowedTimestampSkew;
     private Duration durationToShift;
 
@@ -297,7 +299,7 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  static class TestFormatTimestampDoFn extends DoFn<Integer, String> {
+  static class TestFormatTimestampDoFn extends OldDoFn<Integer, String> {
     @Override
     public void processElement(ProcessContext c) {
       checkNotNull(c.timestamp());
@@ -318,7 +320,7 @@ public class ParDoTest implements Serializable {
       return PCollectionTuple.of(BY2, by2).and(BY3, by3);
     }
 
-    static class FilterFn extends DoFn<Integer, Integer> {
+    static class FilterFn extends OldDoFn<Integer, Integer> {
       private final int divisor;
 
       FilterFn(int divisor) {
@@ -343,7 +345,7 @@ public class ParDoTest implements Serializable {
 
     PCollection<String> output = pipeline
         .apply(Create.of(inputs))
-        .apply(ParDo.of(new TestDoFn()));
+        .apply(ParDo.of(new TestOldDoFn()));
 
     PAssert.that(output)
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -377,7 +379,7 @@ public class ParDoTest implements Serializable {
 
     PCollection<String> output = pipeline
         .apply(Create.of(inputs).withCoder(VarIntCoder.of()))
-        .apply("TestDoFn", ParDo.of(new TestDoFn()));
+        .apply("TestOldDoFn", ParDo.of(new TestOldDoFn()));
 
     PAssert.that(output)
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -395,7 +397,7 @@ public class ParDoTest implements Serializable {
 
     PCollection<String> output = pipeline
         .apply(Create.of(inputs).withCoder(VarIntCoder.of()))
-        .apply("TestDoFn", ParDo.of(new TestNoOutputDoFn()));
+        .apply("TestOldDoFn", ParDo.of(new TestNoOutputDoFn()));
 
     PAssert.that(output).empty();
 
@@ -418,7 +420,7 @@ public class ParDoTest implements Serializable {
     PCollectionTuple outputs = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo
-               .of(new TestDoFn(
+               .of(new TestOldDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
                    Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
                .withOutputTags(
@@ -461,7 +463,7 @@ public class ParDoTest implements Serializable {
     PCollectionTuple outputs = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo
-               .of(new TestDoFn(
+               .of(new TestOldDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
                    Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
                .withOutputTags(
@@ -527,7 +529,7 @@ public class ParDoTest implements Serializable {
     PCollectionTuple outputs = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
-            .of(new DoFn<Integer, Void>(){
+            .of(new OldDoFn<Integer, Void>(){
                 @Override
                 public void processElement(ProcessContext c) {
                   c.sideOutput(sideOutputTag, c.element());
@@ -550,7 +552,7 @@ public class ParDoTest implements Serializable {
 
     PCollection<String> output = pipeline
         .apply(Create.of(inputs))
-        .apply(ParDo.of(new TestDoFn(
+        .apply(ParDo.of(new TestOldDoFn(
             Arrays.<PCollectionView<Integer>>asList(),
             Arrays.asList(sideTag))));
 
@@ -569,7 +571,7 @@ public class ParDoTest implements Serializable {
 
     // Success for a total of 1000 outputs.
     input
-        .apply("Success1000", ParDo.of(new DoFn<Integer, String>() {
+        .apply("Success1000", ParDo.of(new OldDoFn<Integer, String>() {
             @Override
             public void processElement(ProcessContext c) {
               TupleTag<String> specialSideTag = new TupleTag<String>(){};
@@ -585,7 +587,7 @@ public class ParDoTest implements Serializable {
 
     // Failure for a total of 1001 outputs.
     input
-        .apply("Failure1001", ParDo.of(new DoFn<Integer, String>() {
+        .apply("Failure1001", ParDo.of(new OldDoFn<Integer, String>() {
             @Override
             public void processElement(ProcessContext c) {
               for (int i = 0; i < 1000; i++) {
@@ -618,7 +620,7 @@ public class ParDoTest implements Serializable {
     PCollection<String> output = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo.withSideInputs(sideInput1, sideInputUnread, sideInput2)
-            .of(new TestDoFn(
+            .of(new TestOldDoFn(
                 Arrays.asList(sideInput1, sideInput2),
                 Arrays.<TupleTag<String>>asList())));
 
@@ -652,7 +654,7 @@ public class ParDoTest implements Serializable {
         .apply(ParDo.withSideInputs(sideInput1)
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
-            .of(new TestDoFn(
+            .of(new TestOldDoFn(
                 Arrays.asList(sideInput1, sideInput2),
                 Arrays.<TupleTag<String>>asList())));
 
@@ -690,7 +692,7 @@ public class ParDoTest implements Serializable {
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
             .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
-            .of(new TestDoFn(
+            .of(new TestOldDoFn(
                 Arrays.asList(sideInput1, sideInput2),
                 Arrays.<TupleTag<String>>asList())));
 
@@ -728,7 +730,7 @@ public class ParDoTest implements Serializable {
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
             .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
-            .of(new TestDoFn(
+            .of(new TestOldDoFn(
                 Arrays.asList(sideInput1, sideInput2),
                 Arrays.<TupleTag<String>>asList())));
 
@@ -752,7 +754,7 @@ public class ParDoTest implements Serializable {
         .apply(View.<Integer>asSingleton());
 
     pipeline.apply("CreateMain", Create.of(inputs))
-        .apply(ParDo.of(new TestDoFn(
+        .apply(ParDo.of(new TestOldDoFn(
             Arrays.<PCollectionView<Integer>>asList(sideView),
             Arrays.<TupleTag<String>>asList())));
 
@@ -815,18 +817,18 @@ public class ParDoTest implements Serializable {
         .setName("MyInput");
 
     {
-      PCollection<String> output1 = input.apply(ParDo.of(new TestDoFn()));
+      PCollection<String> output1 = input.apply(ParDo.of(new TestOldDoFn()));
       assertEquals("ParDo(Test).out", output1.getName());
     }
 
     {
-      PCollection<String> output2 = input.apply("MyParDo", ParDo.of(new TestDoFn()));
+      PCollection<String> output2 = input.apply("MyParDo", ParDo.of(new TestOldDoFn()));
       assertEquals("MyParDo.out", output2.getName());
     }
 
     {
-      PCollection<String> output4 = input.apply("TestDoFn", ParDo.of(new TestDoFn()));
-      assertEquals("TestDoFn.out", output4.getName());
+      PCollection<String> output4 = input.apply("TestOldDoFn", ParDo.of(new TestOldDoFn()));
+      assertEquals("TestOldDoFn.out", output4.getName());
     }
 
     {
@@ -835,7 +837,7 @@ public class ParDoTest implements Serializable {
           output5.getName());
     }
 
-    assertEquals("ParDo(Printing)", ParDo.of(new PrintingDoFn()).getName());
+    assertEquals("ParDo(Printing)", ParDo.of(new PrintingOldDoFn()).getName());
 
     assertEquals(
         "ParMultiDo(SideOutputDummy)",
@@ -855,7 +857,7 @@ public class ParDoTest implements Serializable {
     PCollectionTuple outputs = p
         .apply(Create.of(Arrays.asList(3, -42, 666))).setName("MyInput")
         .apply("MyParDo", ParDo
-               .of(new TestDoFn(
+               .of(new TestOldDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
                    Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
                .withOutputTags(
@@ -883,7 +885,7 @@ public class ParDoTest implements Serializable {
         .apply("CustomTransform", new PTransform<PCollection<Integer>, PCollection<String>>() {
             @Override
             public PCollection<String> apply(PCollection<Integer> input) {
-              return input.apply(ParDo.of(new TestDoFn()));
+              return input.apply(ParDo.of(new TestOldDoFn()));
             }
           });
 
@@ -920,7 +922,7 @@ public class ParDoTest implements Serializable {
   @Test
   public void testJsonEscaping() {
     // Declare an arbitrary function and make sure we can serialize it
-    DoFn<Integer, Integer> doFn = new DoFn<Integer, Integer>() {
+    OldDoFn<Integer, Integer> doFn = new OldDoFn<Integer, Integer>() {
       @Override
       public void processElement(ProcessContext c) {
         c.output(c.element() + 1);
@@ -973,7 +975,7 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  private static class SideOutputDummyFn extends DoFn<Integer, Integer> {
+  private static class SideOutputDummyFn extends OldDoFn<Integer, Integer> {
     private TupleTag<TestDummy> sideTag;
     public SideOutputDummyFn(TupleTag<TestDummy> sideTag) {
       this.sideTag = sideTag;
@@ -985,7 +987,7 @@ public class ParDoTest implements Serializable {
      }
   }
 
-  private static class MainOutputDummyFn extends DoFn<Integer, TestDummy> {
+  private static class MainOutputDummyFn extends OldDoFn<Integer, TestDummy> {
     private TupleTag<Integer> sideTag;
     public MainOutputDummyFn(TupleTag<Integer> sideTag) {
       this.sideTag = sideTag;
@@ -1167,7 +1169,7 @@ public class ParDoTest implements Serializable {
         .apply(ParDo
             .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
             .of(
-                new DoFn<TestDummy, TestDummy>() {
+                new OldDoFn<TestDummy, TestDummy>() {
                   @Override public void processElement(ProcessContext context) {
                     TestDummy element = context.element();
                     context.output(element);
@@ -1181,7 +1183,7 @@ public class ParDoTest implements Serializable {
     // on a missing coder.
     tuple.get(mainOutputTag)
         .setCoder(TestDummyCoder.of())
-        .apply("Output1", ParDo.of(new DoFn<TestDummy, Integer>() {
+        .apply("Output1", ParDo.of(new OldDoFn<TestDummy, Integer>() {
           @Override public void processElement(ProcessContext context) {
             context.output(1);
           }
@@ -1228,7 +1230,7 @@ public class ParDoTest implements Serializable {
     PCollection<String> output =
         input
         .apply(ParDo.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)).of(
-            new DoFn<Integer, Integer>() {
+            new OldDoFn<Integer, Integer>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.sideOutputWithTimestamp(
@@ -1349,7 +1351,7 @@ public class ParDoTest implements Serializable {
     PCollection<String> output = pipeline
         .apply(Create.timestamped(TimestampedValue.of("elem", new Instant(1))))
         .apply(Window.<String>into(FixedWindows.of(Duration.millis(1))))
-        .apply(ParDo.of(new DoFn<String, String>() {
+        .apply(ParDo.of(new OldDoFn<String, String>() {
                   @Override
                   public void startBundle(Context c) {
                     c.outputWithTimestamp("start", new Instant(2));
@@ -1368,7 +1370,7 @@ public class ParDoTest implements Serializable {
                     System.out.println("Finish: 3");
                   }
                 }))
-        .apply(ParDo.of(new PrintingDoFn()));
+        .apply(ParDo.of(new PrintingOldDoFn()));
 
     PAssert.that(output).satisfies(new Checker());
 
@@ -1383,7 +1385,7 @@ public class ParDoTest implements Serializable {
     pipeline
         .apply(Create.timestamped(TimestampedValue.of("elem", new Instant(1))))
         .apply(Window.<String>into(FixedWindows.of(Duration.millis(1))))
-        .apply(ParDo.of(new DoFn<String, String>() {
+        .apply(ParDo.of(new OldDoFn<String, String>() {
                   @Override
                   public void startBundle(Context c) {
                     c.output("start");
@@ -1400,7 +1402,7 @@ public class ParDoTest implements Serializable {
   }
   @Test
   public void testDoFnDisplayData() {
-    DoFn<String, String> fn = new DoFn<String, String>() {
+    OldDoFn<String, String> fn = new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) {
       }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
index 243b52b..0cc804e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
index fe02573..e7f8cd0 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.TestUtils.NO_LINES;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 
 import static com.google.common.base.Preconditions.checkArgument;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
index a96d19b..fc0e659 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.transforms;
 
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
index 738b492..ee240bf 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
@@ -18,6 +18,7 @@
 package org.apache.beam.sdk.transforms;
 
 import static com.google.common.base.Preconditions.checkArgument;
+
 import static org.hamcrest.Matchers.isA;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -98,12 +99,13 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("Create123", Create.of(1, 2, 3))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                c.output(c.sideInput(view));
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    c.output(c.sideInput(view));
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(47, 47, 47);
 
@@ -124,16 +126,17 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("Create123", Create.timestamped(
-                                        TimestampedValue.of(1, new Instant(4)),
-                                        TimestampedValue.of(2, new Instant(8)),
-                                        TimestampedValue.of(3, new Instant(12))))
+            TimestampedValue.of(1, new Instant(4)),
+            TimestampedValue.of(2, new Instant(8)),
+            TimestampedValue.of(3, new Instant(12))))
             .apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                c.output(c.sideInput(view));
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    c.output(c.sideInput(view));
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(47, 47, 48);
 
@@ -150,7 +153,7 @@ public class ViewTest implements Serializable {
             .apply(View.<Integer>asSingleton());
 
     pipeline.apply("Create123", Create.of(1, 2, 3))
-        .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
+        .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(c.sideInput(view));
@@ -175,7 +178,7 @@ public class ViewTest implements Serializable {
     final PCollectionView<Integer> view = oneTwoThree.apply(View.<Integer>asSingleton());
 
     oneTwoThree.apply(
-        "OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
+        "OutputSideInputs", ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(c.sideInput(view));
@@ -201,16 +204,17 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateMainInput", Create.of(29, 31))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                checkArgument(c.sideInput(view).size() == 4);
-                checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
-                for (Integer i : c.sideInput(view)) {
-                  c.output(i);
-                }
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    checkArgument(c.sideInput(view).size() == 4);
+                    checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
+                    for (Integer i : c.sideInput(view)) {
+                      c.output(i);
+                    }
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 11, 13, 17, 23);
 
@@ -237,19 +241,21 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateMainInput", Create.timestamped(
-                                              TimestampedValue.of(29, new Instant(1)),
-                                              TimestampedValue.of(35, new Instant(11))))
+            TimestampedValue.of(29, new Instant(1)),
+            TimestampedValue.of(35, new Instant(11))))
             .apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                checkArgument(c.sideInput(view).size() == 4);
-                checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
-                for (Integer i : c.sideInput(view)) {
-                  c.output(i);
-                }
-              }
-            }));
+            .apply(
+                "OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    checkArgument(c.sideInput(view).size() == 4);
+                    checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
+                    for (Integer i : c.sideInput(view)) {
+                      c.output(i);
+                    }
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 31, 33, 37, 43);
 
@@ -267,16 +273,17 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertTrue(c.sideInput(view).isEmpty());
-                assertFalse(c.sideInput(view).iterator().hasNext());
-                c.output(1);
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertTrue(c.sideInput(view).isEmpty());
+                    assertFalse(c.sideInput(view).iterator().hasNext());
+                    c.output(1);
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -292,36 +299,37 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateMainInput", Create.of(29))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                try {
-                  c.sideInput(view).clear();
-                  fail("Expected UnsupportedOperationException on clear()");
-                } catch (UnsupportedOperationException expected) {
-                }
-                try {
-                  c.sideInput(view).add(4);
-                  fail("Expected UnsupportedOperationException on add()");
-                } catch (UnsupportedOperationException expected) {
-                }
-                try {
-                  c.sideInput(view).addAll(new ArrayList<Integer>());
-                  fail("Expected UnsupportedOperationException on addAll()");
-                } catch (UnsupportedOperationException expected) {
-                }
-                try {
-                  c.sideInput(view).remove(0);
-                  fail("Expected UnsupportedOperationException on remove()");
-                } catch (UnsupportedOperationException expected) {
-                }
-                for (Integer i : c.sideInput(view)) {
-                  c.output(i);
-                }
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    try {
+                      c.sideInput(view).clear();
+                      fail("Expected UnsupportedOperationException on clear()");
+                    } catch (UnsupportedOperationException expected) {
+                    }
+                    try {
+                      c.sideInput(view).add(4);
+                      fail("Expected UnsupportedOperationException on add()");
+                    } catch (UnsupportedOperationException expected) {
+                    }
+                    try {
+                      c.sideInput(view).addAll(new ArrayList<Integer>());
+                      fail("Expected UnsupportedOperationException on addAll()");
+                    } catch (UnsupportedOperationException expected) {
+                    }
+                    try {
+                      c.sideInput(view).remove(0);
+                      fail("Expected UnsupportedOperationException on remove()");
+                    } catch (UnsupportedOperationException expected) {
+                    }
+                    for (Integer i : c.sideInput(view)) {
+                      c.output(i);
+                    }
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(output).containsInAnyOrder(11);
 
     pipeline.run();
@@ -338,14 +346,15 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateMainInput", Create.of(29, 31))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                for (Integer i : c.sideInput(view)) {
-                  c.output(i);
-                }
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    for (Integer i : c.sideInput(view)) {
+                      c.output(i);
+                    }
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 11, 13, 17, 23);
 
@@ -371,18 +380,21 @@ public class ViewTest implements Serializable {
             .apply(View.<Integer>asIterable());
 
     PCollection<Integer> output =
-        pipeline.apply("CreateMainInput", Create.timestamped(
-                                              TimestampedValue.of(29, new Instant(1)),
-                                              TimestampedValue.of(35, new Instant(11))))
+        pipeline
+            .apply("CreateMainInput",
+                Create.timestamped(
+                    TimestampedValue.of(29, new Instant(1)),
+                    TimestampedValue.of(35, new Instant(11))))
             .apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                for (Integer i : c.sideInput(view)) {
-                  c.output(i);
-                }
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    for (Integer i : c.sideInput(view)) {
+                      c.output(i);
+                    }
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 31, 33, 37, 43);
 
@@ -400,15 +412,16 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertFalse(c.sideInput(view).iterator().hasNext());
-                c.output(1);
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertFalse(c.sideInput(view).iterator().hasNext());
+                    c.output(1);
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -424,22 +437,23 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateMainInput", Create.of(29))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                Iterator<Integer> iterator = c.sideInput(view).iterator();
-                while (iterator.hasNext()) {
-                  try {
-                    iterator.remove();
-                    fail("Expected UnsupportedOperationException on remove()");
-                  } catch (UnsupportedOperationException expected) {
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    Iterator<Integer> iterator = c.sideInput(view).iterator();
+                    while (iterator.hasNext()) {
+                      try {
+                        iterator.remove();
+                        fail("Expected UnsupportedOperationException on remove()");
+                      } catch (UnsupportedOperationException expected) {
+                      }
+                      c.output(iterator.next());
+                    }
                   }
-                  c.output(iterator.next());
-                }
-              }
-            }));
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(output).containsInAnyOrder(11);
 
     pipeline.run();
@@ -458,7 +472,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
@@ -486,7 +500,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of(2 /* size */))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<Integer, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     assertEquals((int) c.element(), c.sideInput(view).size());
@@ -540,7 +554,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
@@ -577,7 +591,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of("blackberry", new Instant(16))))
             .apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<String, KV<String, Integer>>() {
+                                           new OldDoFn<String, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                for (Integer v :
@@ -615,7 +629,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of(1 /* size */, new Instant(16))))
             .apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<Integer, KV<String, Integer>>() {
+                                           new OldDoFn<Integer, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                assertEquals((int) c.element(),
@@ -660,7 +674,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of("blackberry", new Instant(16))))
             .apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<String, KV<String, Integer>>() {
+                                           new OldDoFn<String, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                for (Integer v :
@@ -689,17 +703,18 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertTrue(c.sideInput(view).isEmpty());
-                assertTrue(c.sideInput(view).entrySet().isEmpty());
-                assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
-                c.output(c.element());
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertTrue(c.sideInput(view).isEmpty());
+                    assertTrue(c.sideInput(view).entrySet().isEmpty());
+                    assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+                    c.output(c.element());
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -718,17 +733,18 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertTrue(c.sideInput(view).isEmpty());
-                assertTrue(c.sideInput(view).entrySet().isEmpty());
-                assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
-                c.output(c.element());
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertTrue(c.sideInput(view).isEmpty());
+                    assertTrue(c.sideInput(view).entrySet().isEmpty());
+                    assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+                    c.output(c.element());
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -747,7 +763,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     try {
@@ -776,7 +792,7 @@ public class ViewTest implements Serializable {
                   }
                 }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(output).containsInAnyOrder(KV.of("apple", 1));
 
     pipeline.run();
@@ -795,7 +811,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(
@@ -822,7 +838,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of(2 /* size */))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<Integer, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     assertEquals((int) c.element(), c.sideInput(view).size());
@@ -854,7 +870,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(
@@ -890,7 +906,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of("blackberry", new Instant(16))))
             .apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<String, KV<String, Integer>>() {
+                                           new OldDoFn<String, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                c.output(KV.of(
@@ -927,7 +943,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of(1 /* size */, new Instant(16))))
             .apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<Integer, KV<String, Integer>>() {
+                                           new OldDoFn<Integer, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                assertEquals((int) c.element(),
@@ -972,7 +988,7 @@ public class ViewTest implements Serializable {
                                               TimestampedValue.of("blackberry", new Instant(16))))
             .apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputSideInputs", ParDo.withSideInputs(view).of(
-                                           new DoFn<String, KV<String, Integer>>() {
+                                           new OldDoFn<String, KV<String, Integer>>() {
                                              @Override
                                              public void processElement(ProcessContext c) {
                                                c.output(KV.of(
@@ -1000,17 +1016,18 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertTrue(c.sideInput(view).isEmpty());
-                assertTrue(c.sideInput(view).entrySet().isEmpty());
-                assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
-                c.output(c.element());
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertTrue(c.sideInput(view).isEmpty());
+                    assertTrue(c.sideInput(view).entrySet().isEmpty());
+                    assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+                    c.output(c.element());
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -1028,17 +1045,18 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> results =
         pipeline.apply("Create1", Create.of(1))
-            .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                assertTrue(c.sideInput(view).isEmpty());
-                assertTrue(c.sideInput(view).entrySet().isEmpty());
-                assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
-                c.output(c.element());
-              }
-            }));
+            .apply("OutputSideInputs",
+                ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    assertTrue(c.sideInput(view).isEmpty());
+                    assertTrue(c.sideInput(view).entrySet().isEmpty());
+                    assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+                    c.output(c.element());
+                  }
+                }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(results).containsInAnyOrder(1);
 
     pipeline.run();
@@ -1062,7 +1080,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(
@@ -1093,7 +1111,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateMainInput", Create.of("apple"))
             .apply(
                 "OutputSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     try {
@@ -1121,7 +1139,7 @@ public class ViewTest implements Serializable {
                   }
                 }));
 
-    // Pass at least one value through to guarantee that DoFn executes.
+    // Pass at least one value through to guarantee that OldDoFn executes.
     PAssert.that(output).containsInAnyOrder(KV.of("apple", 1));
 
     pipeline.run();
@@ -1139,12 +1157,14 @@ public class ViewTest implements Serializable {
 
     PCollection<KV<String, Integer>> output =
         pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
-            .apply("Output", ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
-              @Override
-              public void processElement(ProcessContext c) {
-                c.output(KV.of(c.element(), c.sideInput(view).get(c.element().substring(0, 1))));
-              }
-            }));
+            .apply("Output",
+                ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
+                  @Override
+                  public void processElement(ProcessContext c) {
+                    c.output(KV
+                        .of(c.element(), c.sideInput(view).get(c.element().substring(0, 1))));
+                  }
+                }));
 
     PAssert.that(output).containsInAnyOrder(
         KV.of("apple", 21), KV.of("banana", 3), KV.of("blackberry", 3));
@@ -1173,7 +1193,7 @@ public class ViewTest implements Serializable {
                                        TimestampedValue.of("C", new Instant(7))))
             .apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
-                                                  new DoFn<String, String>() {
+                                                  new OldDoFn<String, String>() {
                                                     @Override
                                                     public void processElement(ProcessContext c) {
                                                       c.output(c.element() + c.sideInput(view));
@@ -1206,7 +1226,7 @@ public class ViewTest implements Serializable {
                                        TimestampedValue.of("C", new Instant(7))))
             .apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
-                                                  new DoFn<String, String>() {
+                                                  new OldDoFn<String, String>() {
                                                     @Override
                                                     public void processElement(ProcessContext c) {
                                                       c.output(c.element() + c.sideInput(view));
@@ -1237,7 +1257,7 @@ public class ViewTest implements Serializable {
                                        TimestampedValue.of("C", new Instant(7))))
             .apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
             .apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
-                                                  new DoFn<String, String>() {
+                                                  new OldDoFn<String, String>() {
                                                     @Override
                                                     public void processElement(ProcessContext c) {
                                                       c.output(c.element() + c.sideInput(view));
@@ -1267,7 +1287,7 @@ public class ViewTest implements Serializable {
         p.apply("CreateMainInput", Create.of(""))
             .apply(
                 "OutputMainAndSideInputs",
-                ParDo.withSideInputs(view).of(new DoFn<String, String>() {
+                ParDo.withSideInputs(view).of(new OldDoFn<String, String>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(c.element() + c.sideInput(view));
@@ -1285,7 +1305,7 @@ public class ViewTest implements Serializable {
     Pipeline pipeline = TestPipeline.create();
     final PCollectionView<Iterable<Integer>> view1 =
         pipeline.apply("CreateVoid1", Create.of((Void) null).withCoder(VoidCoder.of()))
-            .apply("OutputOneInteger", ParDo.of(new DoFn<Void, Integer>() {
+            .apply("OutputOneInteger", ParDo.of(new OldDoFn<Void, Integer>() {
               @Override
               public void processElement(ProcessContext c) {
                 c.output(17);
@@ -1297,7 +1317,7 @@ public class ViewTest implements Serializable {
         pipeline.apply("CreateVoid2", Create.of((Void) null).withCoder(VoidCoder.of()))
             .apply(
                 "OutputSideInput",
-                ParDo.withSideInputs(view1).of(new DoFn<Void, Iterable<Integer>>() {
+                ParDo.withSideInputs(view1).of(new OldDoFn<Void, Iterable<Integer>>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     c.output(c.sideInput(view1));
@@ -1307,8 +1327,8 @@ public class ViewTest implements Serializable {
 
     PCollection<Integer> output =
         pipeline.apply("CreateVoid3", Create.of((Void) null).withCoder(VoidCoder.of()))
-            .apply(
-                "ReadIterableSideInput", ParDo.withSideInputs(view2).of(new DoFn<Void, Integer>() {
+            .apply("ReadIterableSideInput",
+                ParDo.withSideInputs(view2).of(new OldDoFn<Void, Integer>() {
                   @Override
                   public void processElement(ProcessContext c) {
                     for (Iterable<Integer> input : c.sideInput(view2)) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
index ac67bb4..d2ba452 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
@@ -65,9 +65,9 @@ public class WithTimestampsTest implements Serializable {
          .apply(WithTimestamps.of(timestampFn));
 
     PCollection<KV<String, Instant>> timestampedVals =
-        timestamped.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+        timestamped.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
           @Override
-          public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+          public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
               throws Exception {
             c.output(KV.of(c.element(), c.timestamp()));
           }
@@ -150,9 +150,9 @@ public class WithTimestampsTest implements Serializable {
              WithTimestamps.of(backInTimeFn).withAllowedTimestampSkew(skew.plus(100L)));
 
     PCollection<KV<String, Instant>> timestampedVals =
-        timestampedWithSkew.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+        timestampedWithSkew.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
           @Override
-          public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+          public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
               throws Exception {
             c.output(KV.of(c.element(), c.timestamp()));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
index ce32b7d..c1848c6 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
@@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertThat;
 
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PBegin;
@@ -50,7 +50,7 @@ public class DisplayDataEvaluatorTest implements Serializable {
         new PTransform<PCollection<String>, POutput> () {
           @Override
           public PCollection<String> apply(PCollection<String> input) {
-            return input.apply(ParDo.of(new DoFn<String, String>() {
+            return input.apply(ParDo.of(new OldDoFn<String, String>() {
               @Override
               public void processElement(ProcessContext c) throws Exception {
                 c.output(c.element());
@@ -79,7 +79,7 @@ public class DisplayDataEvaluatorTest implements Serializable {
   @Test
   public void testPrimitiveTransform() {
     PTransform<? super PCollection<Integer>, ? super PCollection<Integer>> myTransform = ParDo.of(
-        new DoFn<Integer, Integer>() {
+        new OldDoFn<Integer, Integer>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {}
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
index 07029e9..fa44390 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasName
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;



[36/51] [abbrv] incubator-beam git commit: Port mentions of OldDoFn in PipelineOptions

Posted by ke...@apache.org.
Port mentions of OldDoFn in PipelineOptions


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/f5011e5c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/f5011e5c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/f5011e5c

Branch: refs/heads/python-sdk
Commit: f5011e5c62cb00fb4d8a91bd7d55d5083789a307
Parents: 620bd99
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 19:56:33 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/options/PipelineOptions.java   | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/f5011e5c/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
index 365f668..4595fc8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.options.GoogleApiDebugOptions.GoogleApiTracer;
 import org.apache.beam.sdk.options.ProxyInvocationHandler.Deserializer;
 import org.apache.beam.sdk.options.ProxyInvocationHandler.Serializer;
 import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.Context;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.DoFn.Context;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 
 import com.google.auto.service.AutoService;
@@ -35,7 +35,6 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
 import java.lang.reflect.Proxy;
 import java.util.ServiceLoader;
-
 import javax.annotation.concurrent.ThreadSafe;
 
 /**
@@ -52,7 +51,7 @@ import javax.annotation.concurrent.ThreadSafe;
  * and {@link PipelineOptionsFactory#as(Class)}. They can be created
  * from command-line arguments with {@link PipelineOptionsFactory#fromArgs(String[])}.
  * They can be converted to another type by invoking {@link PipelineOptions#as(Class)} and
- * can be accessed from within a {@link OldDoFn} by invoking
+ * can be accessed from within a {@link DoFn} by invoking
  * {@link Context#getPipelineOptions()}.
  *
  * <p>For example:
@@ -151,7 +150,7 @@ import javax.annotation.concurrent.ThreadSafe;
  * {@link PipelineOptionsFactory#withValidation()} is invoked.
  *
  * <p>{@link JsonIgnore @JsonIgnore} is used to prevent a property from being serialized and
- * available during execution of {@link OldDoFn}. See the Serialization section below for more
+ * available during execution of {@link DoFn}. See the Serialization section below for more
  * details.
  *
  * <h2>Registration Of PipelineOptions</h2>


[18/51] [abbrv] incubator-beam git commit: SparkRunner calls pipeline.run

Posted by ke...@apache.org.
SparkRunner calls pipeline.run

* Remove SparkStreamingPipelineOptions.
* Run pipeline with Pipeline.run().
* Better EmbeddedKafka.
* Avoid NPE if factory wasn't created.
* Let EmbeddedKafka/Zookeeper discover ports on their own.


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/ac0875de
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/ac0875de
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/ac0875de

Branch: refs/heads/python-sdk
Commit: ac0875de84085e1298575d0887e83e5deee5f418
Parents: c314e67
Author: Sela <an...@paypal.com>
Authored: Wed Jul 27 23:11:37 2016 +0300
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 22:31:48 2016 -0700

----------------------------------------------------------------------
 .../runners/spark/SparkPipelineOptions.java     |  6 ++++
 .../apache/beam/runners/spark/SparkRunner.java  | 14 +++------
 .../runners/spark/SparkRunnerRegistrar.java     |  6 ++--
 .../spark/SparkStreamingPipelineOptions.java    | 32 --------------------
 .../beam/runners/spark/TestSparkRunner.java     |  2 --
 .../apache/beam/runners/spark/DeDupTest.java    |  2 +-
 .../beam/runners/spark/EmptyInputTest.java      |  2 +-
 .../beam/runners/spark/SimpleWordCountTest.java |  4 +--
 .../runners/spark/SparkRunnerRegistrarTest.java |  2 +-
 .../apache/beam/runners/spark/TfIdfTest.java    |  2 +-
 .../beam/runners/spark/io/AvroPipelineTest.java |  2 +-
 .../beam/runners/spark/io/NumShardsTest.java    |  2 +-
 .../io/hadoop/HadoopFileFormatPipelineTest.java |  2 +-
 .../spark/translation/CombineGloballyTest.java  |  2 +-
 .../spark/translation/CombinePerKeyTest.java    |  2 +-
 .../spark/translation/DoFnOutputTest.java       |  6 ++--
 .../translation/MultiOutputWordCountTest.java   |  2 +-
 .../spark/translation/SerializationTest.java    |  2 +-
 .../spark/translation/SideEffectsTest.java      |  8 ++---
 .../streaming/FlattenStreamingTest.java         |  8 ++---
 .../streaming/KafkaStreamingTest.java           | 13 ++++----
 .../streaming/SimpleStreamingWordCountTest.java |  8 ++---
 .../streaming/utils/EmbeddedKafkaCluster.java   |  4 ++-
 23 files changed, 49 insertions(+), 84 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
index 4bb2a57..6ef3741 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
@@ -33,4 +33,10 @@ public interface SparkPipelineOptions extends PipelineOptions, StreamingOptions,
   @Default.String("local[1]")
   String getSparkMaster();
   void setSparkMaster(String master);
+
+  @Description("Timeout to wait (in msec) for a streaming execution to stop, -1 runs until "
+          + "execution is stopped")
+  @Default.Long(-1)
+  Long getTimeout();
+  void setTimeout(Long batchInterval);
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
index dfda987..d994ec4 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java
@@ -69,8 +69,6 @@ import org.slf4j.LoggerFactory;
  * options.setSparkMaster("spark://host:port");
  * EvaluationResult result = SparkRunner.create(options).run(p);
  * }
- *
- * To create a Spark streaming pipeline runner use {@link SparkStreamingPipelineOptions}
  */
 public final class SparkRunner extends PipelineRunner<EvaluationResult> {
 
@@ -146,12 +144,6 @@ public final class SparkRunner extends PipelineRunner<EvaluationResult> {
   @Override
   public EvaluationResult run(Pipeline pipeline) {
     try {
-      // validate streaming configuration
-      if (mOptions.isStreaming() && !(mOptions instanceof SparkStreamingPipelineOptions)) {
-        throw new RuntimeException("A streaming job must be configured with "
-            + SparkStreamingPipelineOptions.class.getSimpleName() + ", found "
-            + mOptions.getClass().getSimpleName());
-      }
       LOG.info("Executing pipeline using the SparkRunner.");
       JavaSparkContext jsc = SparkContextFactory.getSparkContext(mOptions
               .getSparkMaster(), mOptions.getAppName());
@@ -179,6 +171,9 @@ public final class SparkRunner extends PipelineRunner<EvaluationResult> {
 
         return ctxt;
       } else {
+        if (mOptions.getTimeout() > 0) {
+          LOG.info("Timeout is ignored by the SparkRunner in batch.");
+        }
         EvaluationContext ctxt = new EvaluationContext(jsc, pipeline);
         SparkPipelineTranslator translator = new TransformTranslator.Translator();
         pipeline.traverseTopologically(new SparkPipelineEvaluator(ctxt, translator));
@@ -210,9 +205,8 @@ public final class SparkRunner extends PipelineRunner<EvaluationResult> {
   private EvaluationContext
       createStreamingEvaluationContext(JavaSparkContext jsc, Pipeline pipeline,
       Duration batchDuration) {
-    SparkStreamingPipelineOptions streamingOptions = (SparkStreamingPipelineOptions) mOptions;
     JavaStreamingContext jssc = new JavaStreamingContext(jsc, batchDuration);
-    return new StreamingEvaluationContext(jsc, pipeline, jssc, streamingOptions.getTimeout());
+    return new StreamingEvaluationContext(jsc, pipeline, jssc, mOptions.getTimeout());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerRegistrar.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerRegistrar.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerRegistrar.java
index 2bed6a5..7a31753 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerRegistrar.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerRegistrar.java
@@ -49,15 +49,13 @@ public final class SparkRunnerRegistrar {
   }
 
   /**
-   * Registers the {@link SparkPipelineOptions} and {@link SparkStreamingPipelineOptions}.
+   * Registers the {@link SparkPipelineOptions}.
    */
   @AutoService(PipelineOptionsRegistrar.class)
   public static class Options implements PipelineOptionsRegistrar {
     @Override
     public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() {
-      return ImmutableList.<Class<? extends PipelineOptions>>of(
-          SparkPipelineOptions.class,
-          SparkStreamingPipelineOptions.class);
+      return ImmutableList.<Class<? extends PipelineOptions>>of(SparkPipelineOptions.class);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkStreamingPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkStreamingPipelineOptions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkStreamingPipelineOptions.java
deleted file mode 100644
index 5944acd..0000000
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkStreamingPipelineOptions.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.spark;
-
-import org.apache.beam.sdk.options.Default;
-import org.apache.beam.sdk.options.Description;
-
-/**
- * Options used to configure Spark streaming.
- */
-public interface SparkStreamingPipelineOptions extends SparkPipelineOptions {
-  @Description("Timeout to wait (in msec) for the streaming execution so stop, -1 runs until "
-          + "execution is stopped")
-  @Default.Long(-1)
-  Long getTimeout();
-  void setTimeout(Long batchInterval);
-}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
index e2b953d..50ed5f3 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
@@ -46,8 +46,6 @@ import org.apache.beam.sdk.values.POutput;
  * options.setSparkMaster("spark://host:port");
  * EvaluationResult result = SparkRunner.create(options).run(p);
  * }
- *
- * To create a Spark streaming pipeline runner use {@link SparkStreamingPipelineOptions}
  */
 public final class TestSparkRunner extends PipelineRunner<EvaluationResult> {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/DeDupTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/DeDupTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/DeDupTest.java
index dcf04a7..9a16744 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/DeDupTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/DeDupTest.java
@@ -56,7 +56,7 @@ public class DeDupTest {
 
     PAssert.that(output).containsInAnyOrder(EXPECTED_SET);
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/EmptyInputTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/EmptyInputTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/EmptyInputTest.java
index 7befec2..c2e331f 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/EmptyInputTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/EmptyInputTest.java
@@ -49,7 +49,7 @@ public class EmptyInputTest {
     PCollection<String> inputWords = p.apply(Create.of(empty).withCoder(StringUtf8Coder.of()));
     PCollection<String> output = inputWords.apply(Combine.globally(new ConcatWords()));
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     assertEquals("", Iterables.getOnlyElement(res.get(output)));
     res.close();
   }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/SimpleWordCountTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/SimpleWordCountTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SimpleWordCountTest.java
index 6f5ce5e..441d92d 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/SimpleWordCountTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SimpleWordCountTest.java
@@ -67,7 +67,7 @@ public class SimpleWordCountTest {
 
     PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET);
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 
@@ -87,7 +87,7 @@ public class SimpleWordCountTest {
     File outputFile = testFolder.newFile();
     output.apply("WriteCounts", TextIO.Write.to(outputFile.getAbsolutePath()).withoutSharding());
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
 
     assertThat(Sets.newHashSet(FileUtils.readLines(outputFile)),

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerRegistrarTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerRegistrarTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerRegistrarTest.java
index 236251b..3ca9df4 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerRegistrarTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerRegistrarTest.java
@@ -41,7 +41,7 @@ public class SparkRunnerRegistrarTest {
   @Test
   public void testOptions() {
     assertEquals(
-        ImmutableList.of(SparkPipelineOptions.class, SparkStreamingPipelineOptions.class),
+        ImmutableList.of(SparkPipelineOptions.class),
         new SparkRunnerRegistrar.Options().getPipelineOptions());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
index e4a293f..074e6aa 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
@@ -72,7 +72,7 @@ public class TfIdfTest {
 
     PAssert.that(words).containsInAnyOrder(Arrays.asList("a", "m", "n", "b", "c", "d"));
 
-    EvaluationResult res = SparkRunner.create().run(pipeline);
+    EvaluationResult res = (EvaluationResult) pipeline.run();
     res.close();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/io/AvroPipelineTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/AvroPipelineTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/AvroPipelineTest.java
index 787292e..d862424 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/AvroPipelineTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/AvroPipelineTest.java
@@ -81,7 +81,7 @@ public class AvroPipelineTest {
     PCollection<GenericRecord> input = p.apply(
         AvroIO.Read.from(inputFile.getAbsolutePath()).withSchema(schema));
     input.apply(AvroIO.Write.to(outputDir.getAbsolutePath()).withSchema(schema));
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
 
     List<GenericRecord> records = readGenericFile();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/io/NumShardsTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/NumShardsTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/NumShardsTest.java
index 36d8b67..9c65917 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/NumShardsTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/NumShardsTest.java
@@ -79,7 +79,7 @@ public class NumShardsTest {
     PCollection<String> output = inputWords.apply(new WordCount.CountWords())
         .apply(MapElements.via(new WordCount.FormatAsTextFn()));
     output.apply(TextIO.Write.to(outputDir.getAbsolutePath()).withNumShards(3).withSuffix(".txt"));
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
 
     int count = 0;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/io/hadoop/HadoopFileFormatPipelineTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/hadoop/HadoopFileFormatPipelineTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/hadoop/HadoopFileFormatPipelineTest.java
index 6d09503..01aa839 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/io/hadoop/HadoopFileFormatPipelineTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/io/hadoop/HadoopFileFormatPipelineTest.java
@@ -92,7 +92,7 @@ public class HadoopFileFormatPipelineTest {
     HadoopIO.Write.Bound<IntWritable, Text> write = HadoopIO.Write.to(outputFile.getAbsolutePath(),
         outputFormatClass, IntWritable.class, Text.class);
     input.apply(write.withoutSharding());
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
 
     IntWritable key = new IntWritable();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombineGloballyTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombineGloballyTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombineGloballyTest.java
index 798f55a..e4ef7d7 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombineGloballyTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombineGloballyTest.java
@@ -55,7 +55,7 @@ public class CombineGloballyTest {
     PCollection<String> inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of()));
     PCollection<String> output = inputWords.apply(Combine.globally(new WordMerger()));
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     assertEquals("hi there,hi,hi sue bob,hi sue,,bob hi",
         Iterables.getOnlyElement(res.get(output)));
     res.close();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
index 2e477e9..dee9213 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
@@ -57,7 +57,7 @@ public class CombinePerKeyTest {
         Pipeline p = Pipeline.create(options);
         PCollection<String> inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of()));
         PCollection<KV<String, Long>> cnts = inputWords.apply(new SumPerKey<String>());
-        EvaluationResult res = SparkRunner.create().run(p);
+        EvaluationResult res = (EvaluationResult) p.run();
         Map<String, Long> actualCnts = new HashMap<>();
         for (KV<String, Long> kv : res.get(cnts)) {
             actualCnts.put(kv.getKey(), kv.getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
index 263ce99..e4b25bb 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
@@ -41,9 +41,9 @@ public class DoFnOutputTest implements Serializable {
   public void test() throws Exception {
     SparkPipelineOptions options = PipelineOptionsFactory.as(SparkPipelineOptions.class);
     options.setRunner(SparkRunner.class);
-    Pipeline pipeline = Pipeline.create(options);
+    Pipeline p = Pipeline.create(options);
 
-    PCollection<String> strings = pipeline.apply(Create.of("a"));
+    PCollection<String> strings = p.apply(Create.of("a"));
     // Test that values written from startBundle() and finishBundle() are written to
     // the output
     PCollection<String> output = strings.apply(ParDo.of(new OldDoFn<String, String>() {
@@ -63,7 +63,7 @@ public class DoFnOutputTest implements Serializable {
 
     PAssert.that(output).containsInAnyOrder("start", "a", "finish");
 
-    EvaluationResult res = SparkRunner.create().run(pipeline);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
index 739eec3..066521b 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
@@ -84,7 +84,7 @@ public class MultiOutputWordCountTest {
     PCollection<Long> unique = luc.get(lowerCnts).apply(
         ApproximateUnique.<KV<String, Long>>globally(16));
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     PAssert.that(luc.get(lowerCnts).apply(ParDo.of(new FormatCountsFn())))
         .containsInAnyOrder(EXPECTED_LOWER_COUNTS);
     Iterable<KV<String, Long>> actualUpper = res.get(luc.get(upperCnts));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
index 5e96c46..fb97b8b 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
@@ -140,7 +140,7 @@ public class SerializationTest {
 
     PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET);
 
-    EvaluationResult res = SparkRunner.create().run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
index 5775565..6cefa49 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
@@ -50,11 +50,11 @@ public class SideEffectsTest implements Serializable {
   public void test() throws Exception {
     SparkPipelineOptions options = PipelineOptionsFactory.as(SparkPipelineOptions.class);
     options.setRunner(SparkRunner.class);
-    Pipeline pipeline = Pipeline.create(options);
+    Pipeline p = Pipeline.create(options);
 
-    pipeline.getCoderRegistry().registerCoder(URI.class, StringDelegateCoder.of(URI.class));
+    p.getCoderRegistry().registerCoder(URI.class, StringDelegateCoder.of(URI.class));
 
-    pipeline.apply(Create.of("a")).apply(ParDo.of(new OldDoFn<String, String>() {
+    p.apply(Create.of("a")).apply(ParDo.of(new OldDoFn<String, String>() {
       @Override
       public void processElement(ProcessContext c) throws Exception {
         throw new UserException();
@@ -62,7 +62,7 @@ public class SideEffectsTest implements Serializable {
     }));
 
     try {
-      pipeline.run();
+      p.run();
       fail("Run should thrown an exception");
     } catch (RuntimeException e) {
       assertNotNull(e.getCause());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/FlattenStreamingTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/FlattenStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/FlattenStreamingTest.java
index ed7e9b7..deb1b6a 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/FlattenStreamingTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/FlattenStreamingTest.java
@@ -18,8 +18,8 @@
 package org.apache.beam.runners.spark.translation.streaming;
 
 import org.apache.beam.runners.spark.EvaluationResult;
+import org.apache.beam.runners.spark.SparkPipelineOptions;
 import org.apache.beam.runners.spark.SparkRunner;
-import org.apache.beam.runners.spark.SparkStreamingPipelineOptions;
 import org.apache.beam.runners.spark.io.CreateStream;
 import org.apache.beam.runners.spark.translation.streaming.utils.PAssertStreaming;
 import org.apache.beam.sdk.Pipeline;
@@ -57,8 +57,8 @@ public class FlattenStreamingTest {
 
   @Test
   public void testRun() throws Exception {
-    SparkStreamingPipelineOptions options =
-        PipelineOptionsFactory.as(SparkStreamingPipelineOptions.class);
+    SparkPipelineOptions options =
+        PipelineOptionsFactory.as(SparkPipelineOptions.class);
     options.setRunner(SparkRunner.class);
     options.setStreaming(true);
     options.setTimeout(TEST_TIMEOUT_MSEC); // run for one interval
@@ -77,7 +77,7 @@ public class FlattenStreamingTest {
 
     PAssertStreaming.assertContents(union, EXPECTED_UNION);
 
-    EvaluationResult res = SparkRunner.create(options).run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
index c005f14..fa98ca3 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
@@ -18,8 +18,8 @@
 package org.apache.beam.runners.spark.translation.streaming;
 
 import org.apache.beam.runners.spark.EvaluationResult;
+import org.apache.beam.runners.spark.SparkPipelineOptions;
 import org.apache.beam.runners.spark.SparkRunner;
-import org.apache.beam.runners.spark.SparkStreamingPipelineOptions;
 import org.apache.beam.runners.spark.io.KafkaIO;
 import org.apache.beam.runners.spark.translation.streaming.utils.EmbeddedKafkaCluster;
 import org.apache.beam.runners.spark.translation.streaming.utils.PAssertStreaming;
@@ -56,10 +56,9 @@ import kafka.serializer.StringDecoder;
  */
 public class KafkaStreamingTest {
   private static final EmbeddedKafkaCluster.EmbeddedZookeeper EMBEDDED_ZOOKEEPER =
-          new EmbeddedKafkaCluster.EmbeddedZookeeper(17001);
+          new EmbeddedKafkaCluster.EmbeddedZookeeper();
   private static final EmbeddedKafkaCluster EMBEDDED_KAFKA_CLUSTER =
-          new EmbeddedKafkaCluster(EMBEDDED_ZOOKEEPER.getConnection(),
-                  new Properties(), Collections.singletonList(6667));
+          new EmbeddedKafkaCluster(EMBEDDED_ZOOKEEPER.getConnection(), new Properties());
   private static final String TOPIC = "kafka_dataflow_test_topic";
   private static final Map<String, String> KAFKA_MESSAGES = ImmutableMap.of(
       "k1", "v1", "k2", "v2", "k3", "v3", "k4", "v4"
@@ -89,8 +88,8 @@ public class KafkaStreamingTest {
   @Test
   public void testRun() throws Exception {
     // test read from Kafka
-    SparkStreamingPipelineOptions options =
-        PipelineOptionsFactory.as(SparkStreamingPipelineOptions.class);
+    SparkPipelineOptions options =
+        PipelineOptionsFactory.as(SparkPipelineOptions.class);
     options.setRunner(SparkRunner.class);
     options.setStreaming(true);
     options.setTimeout(TEST_TIMEOUT_MSEC); // run for one interval
@@ -112,7 +111,7 @@ public class KafkaStreamingTest {
 
     PAssertStreaming.assertContents(formattedKV, EXPECTED);
 
-    EvaluationResult res = SparkRunner.create(options).run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SimpleStreamingWordCountTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SimpleStreamingWordCountTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SimpleStreamingWordCountTest.java
index 4fa03f6..5627056 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SimpleStreamingWordCountTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SimpleStreamingWordCountTest.java
@@ -19,8 +19,8 @@ package org.apache.beam.runners.spark.translation.streaming;
 
 
 import org.apache.beam.runners.spark.EvaluationResult;
+import org.apache.beam.runners.spark.SparkPipelineOptions;
 import org.apache.beam.runners.spark.SparkRunner;
-import org.apache.beam.runners.spark.SparkStreamingPipelineOptions;
 import org.apache.beam.runners.spark.examples.WordCount;
 import org.apache.beam.runners.spark.io.CreateStream;
 import org.apache.beam.runners.spark.translation.streaming.utils.PAssertStreaming;
@@ -54,8 +54,8 @@ public class SimpleStreamingWordCountTest implements Serializable {
 
   @Test
   public void testRun() throws Exception {
-    SparkStreamingPipelineOptions options =
-        PipelineOptionsFactory.as(SparkStreamingPipelineOptions.class);
+    SparkPipelineOptions options =
+        PipelineOptionsFactory.as(SparkPipelineOptions.class);
     options.setRunner(SparkRunner.class);
     options.setStreaming(true);
     options.setTimeout(TEST_TIMEOUT_MSEC); // run for one interval
@@ -70,7 +70,7 @@ public class SimpleStreamingWordCountTest implements Serializable {
         .apply(MapElements.via(new WordCount.FormatAsTextFn()));
 
     PAssertStreaming.assertContents(output, EXPECTED_COUNTS);
-    EvaluationResult res = SparkRunner.create(options).run(p);
+    EvaluationResult res = (EvaluationResult) p.run();
     res.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ac0875de/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/utils/EmbeddedKafkaCluster.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/utils/EmbeddedKafkaCluster.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/utils/EmbeddedKafkaCluster.java
index 0fec573..cd326ef 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/utils/EmbeddedKafkaCluster.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/utils/EmbeddedKafkaCluster.java
@@ -219,7 +219,9 @@ public class EmbeddedKafkaCluster {
 
 
     public void shutdown() {
-      factory.shutdown();
+      if (factory != null) {
+        factory.shutdown();
+      }
       try {
         TestUtils.deleteFile(snapshotDir);
       } catch (FileNotFoundException e) {


[39/51] [abbrv] incubator-beam git commit: Correctly determine if DoFn has an anonymous class in ParDo

Posted by ke...@apache.org.
Correctly determine if DoFn has an anonymous class in ParDo


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/0b186529
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/0b186529
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/0b186529

Branch: refs/heads/python-sdk
Commit: 0b1865295cb89d88878d0a021df103ed45240924
Parents: fcf6b1d
Author: Kenneth Knowles <kl...@google.com>
Authored: Thu Aug 4 14:54:56 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/0b186529/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index bb1af9c..91f6203 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -958,7 +958,7 @@ public class ParDo {
     @Override
     protected String getKindString() {
       Class<?> clazz = DoFnReflector.getDoFnClass(fn);
-      if (fn.getClass().isAnonymousClass()) {
+      if (clazz.isAnonymousClass()) {
         return "AnonymousParMultiDo";
       } else {
         return String.format("ParMultiDo(%s)", StringUtils.approximateSimpleName(clazz));


[32/51] [abbrv] incubator-beam git commit: Port examples to new DoFn

Posted by ke...@apache.org.
Port examples to new DoFn

Port example tests to new DoFn

Port TfIdf example to new DoFn

Port TopWikipediaSessions example to new DoFn

Port GameState Java 8 example to new DoFn

Port the UserScore example to new DoFn

Port StreamingWordExtract example to new DoFn

Port TrafficMaxLaneFlow to new DoFn

Port TrafficRoutes example to new DoFn

Port DatastoreWordCount example to new DoFn

Port BigQueryTornadoes example to new DoFn

Port MaxPerKeyExamples to new DoFn

Port CombinePerKeyExamples to new DoFn

Port TriggerExample to new DoFn

Port JoinExamples to new DoFn

Port FilterExamples to new DoFn

Fix mention of DoFn in WordCountTest


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/71e027dc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/71e027dc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/71e027dc

Branch: refs/heads/python-sdk
Commit: 71e027dc1ff7d5de0eea82278427546c07e26e8f
Parents: 734bfb9
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 18:54:22 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:54:29 2016 -0700

----------------------------------------------------------------------
 .../examples/complete/StreamingWordExtract.java | 18 ++++++------
 .../apache/beam/examples/complete/TfIdf.java    | 28 +++++++++---------
 .../examples/complete/TopWikipediaSessions.java | 31 +++++++++-----------
 .../examples/complete/TrafficMaxLaneFlow.java   | 16 +++++-----
 .../beam/examples/complete/TrafficRoutes.java   | 20 ++++++-------
 .../examples/cookbook/BigQueryTornadoes.java    | 10 +++----
 .../cookbook/CombinePerKeyExamples.java         | 10 +++----
 .../examples/cookbook/DatastoreWordCount.java   | 14 ++++-----
 .../beam/examples/cookbook/FilterExamples.java  | 20 ++++++-------
 .../beam/examples/cookbook/JoinExamples.java    | 18 ++++++------
 .../examples/cookbook/MaxPerKeyExamples.java    | 10 +++----
 .../beam/examples/cookbook/TriggerExample.java  | 25 ++++++++--------
 .../org/apache/beam/examples/WordCountTest.java |  3 +-
 .../examples/complete/AutoCompleteTest.java     |  6 ++--
 .../examples/cookbook/TriggerExampleTest.java   |  6 ++--
 .../beam/examples/complete/game/GameStats.java  | 22 +++++++-------
 .../beam/examples/complete/game/UserScore.java  |  6 ++--
 .../examples/complete/game/UserScoreTest.java   |  2 +-
 18 files changed, 130 insertions(+), 135 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
index b0c9ffd..3f30f21 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import com.google.api.services.bigquery.model.TableFieldSchema;
@@ -55,9 +55,9 @@ import java.util.ArrayList;
  */
 public class StreamingWordExtract {
 
-  /** A OldDoFn that tokenizes lines of text into individual words. */
-  static class ExtractWords extends OldDoFn<String, String> {
-    @Override
+  /** A {@link DoFn} that tokenizes lines of text into individual words. */
+  static class ExtractWords extends DoFn<String, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       String[] words = c.element().split("[^a-zA-Z']+");
       for (String word : words) {
@@ -68,9 +68,9 @@ public class StreamingWordExtract {
     }
   }
 
-  /** A OldDoFn that uppercases a word. */
-  static class Uppercase extends OldDoFn<String, String> {
-    @Override
+  /** A {@link DoFn} that uppercases a word. */
+  static class Uppercase extends DoFn<String, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(c.element().toUpperCase());
     }
@@ -79,11 +79,11 @@ public class StreamingWordExtract {
   /**
    * Converts strings into BigQuery rows.
    */
-  static class StringToRowConverter extends OldDoFn<String, TableRow> {
+  static class StringToRowConverter extends DoFn<String, TableRow> {
     /**
      * In this example, put the whole string into single BigQuery field.
      */
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(new TableRow().set("string_field", c.element()));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
index 470a689..76b6b6a 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Keys;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -225,8 +225,8 @@ public class TfIdf {
       // of the words in the document associated with that that URI.
       PCollection<KV<URI, String>> uriToWords = uriToContent
           .apply("SplitWords", ParDo.of(
-              new OldDoFn<KV<URI, String>, KV<URI, String>>() {
-                @Override
+              new DoFn<KV<URI, String>, KV<URI, String>>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
                   String line = c.element().getValue();
@@ -268,8 +268,8 @@ public class TfIdf {
       // by the URI key.
       PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
           .apply("ShiftKeys", ParDo.of(
-              new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
-                @Override
+              new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey().getKey();
                   String word = c.element().getKey().getValue();
@@ -307,8 +307,8 @@ public class TfIdf {
       // divided by the total number of words in the document.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
           .apply("ComputeTermFrequencies", ParDo.of(
-              new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
-                @Override
+              new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   URI uri = c.element().getKey();
                   Long wordTotal = c.element().getValue().getOnly(wordTotalsTag);
@@ -328,12 +328,12 @@ public class TfIdf {
       // documents in which the word appears divided by the total
       // number of documents in the corpus. Note how the total number of
       // documents is passed as a side input; the same value is
-      // presented to each invocation of the OldDoFn.
+      // presented to each invocation of the DoFn.
       PCollection<KV<String, Double>> wordToDf = wordToDocCount
           .apply("ComputeDocFrequencies", ParDo
               .withSideInputs(totalDocuments)
-              .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
-                @Override
+              .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();
                   Long documentCount = c.element().getValue();
@@ -361,8 +361,8 @@ public class TfIdf {
       // divided by the log of the document frequency.
       PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf = wordToUriAndTfAndDf
           .apply("ComputeTfIdf", ParDo.of(
-              new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
-                @Override
+              new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   String word = c.element().getKey();
                   Double df = c.element().getValue().getOnly(dfTag);
@@ -400,8 +400,8 @@ public class TfIdf {
     @Override
     public PDone apply(PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf) {
       return wordToUriAndTfIdf
-          .apply("Format", ParDo.of(new OldDoFn<KV<String, KV<URI, Double>>, String>() {
-            @Override
+          .apply("Format", ParDo.of(new DoFn<KV<String, KV<URI, Double>>, String>() {
+            @ProcessElement
             public void processElement(ProcessContext c) {
               c.output(String.format("%s,\t%s,\t%f",
                   c.element().getKey(),

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
index 0ed89d2..aff41cc 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
@@ -26,12 +26,12 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableComparator;
 import org.apache.beam.sdk.transforms.Top;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.CalendarWindows;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.transforms.windowing.Sessions;
@@ -85,8 +85,8 @@ public class TopWikipediaSessions {
   /**
    * Extracts user and timestamp from a TableRow representing a Wikipedia edit.
    */
-  static class ExtractUserAndTimestamp extends OldDoFn<TableRow, String> {
-    @Override
+  static class ExtractUserAndTimestamp extends DoFn<TableRow, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
       int timestamp = (Integer) row.get("timestamp");
@@ -132,24 +132,21 @@ public class TopWikipediaSessions {
     }
   }
 
-  static class SessionsToStringsDoFn extends OldDoFn<KV<String, Long>, KV<String, Long>>
-      implements RequiresWindowAccess {
-
-    @Override
-    public void processElement(ProcessContext c) {
+  static class SessionsToStringsDoFn extends DoFn<KV<String, Long>, KV<String, Long>> {
+    @ProcessElement
+    public void processElement(ProcessContext c, BoundedWindow window) {
       c.output(KV.of(
-          c.element().getKey() + " : " + c.window(), c.element().getValue()));
+          c.element().getKey() + " : " + window, c.element().getValue()));
     }
   }
 
-  static class FormatOutputDoFn extends OldDoFn<List<KV<String, Long>>, String>
-      implements RequiresWindowAccess {
-    @Override
-    public void processElement(ProcessContext c) {
+  static class FormatOutputDoFn extends DoFn<List<KV<String, Long>>, String> {
+    @ProcessElement
+    public void processElement(ProcessContext c, BoundedWindow window) {
       for (KV<String, Long> item : c.element()) {
         String session = item.getKey();
         long count = item.getValue();
-        c.output(session + " : " + count + " : " + ((IntervalWindow) c.window()).start());
+        c.output(session + " : " + count + " : " + ((IntervalWindow) window).start());
       }
     }
   }
@@ -168,8 +165,8 @@ public class TopWikipediaSessions {
           .apply(ParDo.of(new ExtractUserAndTimestamp()))
 
           .apply("SampleUsers", ParDo.of(
-              new OldDoFn<String, String>() {
-                @Override
+              new DoFn<String, String>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   if (Math.abs(c.element().hashCode()) <= Integer.MAX_VALUE * samplingThreshold) {
                     c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
index 9122015..394b432 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -145,12 +145,12 @@ public class TrafficMaxLaneFlow {
   /**
    * Extract the timestamp field from the input string, and use it as the element timestamp.
    */
-  static class ExtractTimestamps extends OldDoFn<String, String> {
+  static class ExtractTimestamps extends DoFn<String, String> {
     private static final DateTimeFormatter dateTimeFormat =
         DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
 
-    @Override
-    public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
+    @ProcessElement
+    public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
       String[] items = c.element().split(",");
       if (items.length > 0) {
         try {
@@ -170,9 +170,9 @@ public class TrafficMaxLaneFlow {
    * information. The number of lanes for which data is present depends upon which freeway the data
    * point comes from.
    */
-  static class ExtractFlowInfoFn extends OldDoFn<String, KV<String, LaneInfo>> {
+  static class ExtractFlowInfoFn extends DoFn<String, KV<String, LaneInfo>> {
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       String[] items = c.element().split(",");
       if (items.length < 48) {
@@ -226,8 +226,8 @@ public class TrafficMaxLaneFlow {
    * Format the results of the Max Lane flow calculation to a TableRow, to save to BigQuery.
    * Add the timestamp from the window context.
    */
-  static class FormatMaxesFn extends OldDoFn<KV<String, LaneInfo>, TableRow> {
-    @Override
+  static class FormatMaxesFn extends DoFn<KV<String, LaneInfo>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
 
       LaneInfo laneInfo = c.element().getValue();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
index 30091b6..ef716e9 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
@@ -29,8 +29,8 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
@@ -149,12 +149,12 @@ public class TrafficRoutes {
   /**
    * Extract the timestamp field from the input string, and use it as the element timestamp.
    */
-  static class ExtractTimestamps extends OldDoFn<String, String> {
+  static class ExtractTimestamps extends DoFn<String, String> {
     private static final DateTimeFormatter dateTimeFormat =
         DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
 
-    @Override
-    public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
+    @ProcessElement
+    public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
       String[] items = c.element().split(",");
       String timestamp = tryParseTimestamp(items);
       if (timestamp != null) {
@@ -171,9 +171,9 @@ public class TrafficRoutes {
    * Filter out readings for the stations along predefined 'routes', and output
    * (station, speed info) keyed on route.
    */
-  static class ExtractStationSpeedFn extends OldDoFn<String, KV<String, StationSpeed>> {
+  static class ExtractStationSpeedFn extends DoFn<String, KV<String, StationSpeed>> {
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       String[] items = c.element().split(",");
       String stationType = tryParseStationType(items);
@@ -200,8 +200,8 @@ public class TrafficRoutes {
    * Note: these calculations are for example purposes only, and are unrealistic and oversimplified.
    */
   static class GatherStats
-      extends OldDoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
-    @Override
+      extends DoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws IOException {
       String route = c.element().getKey();
       double speedSum = 0.0;
@@ -243,8 +243,8 @@ public class TrafficRoutes {
   /**
    * Format the results of the slowdown calculations to a TableRow, to save to BigQuery.
    */
-  static class FormatStatsFn extends OldDoFn<KV<String, RouteInfo>, TableRow> {
-    @Override
+  static class FormatStatsFn extends DoFn<KV<String, RouteInfo>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       RouteInfo routeInfo = c.element().getValue();
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
index 6002b11..09d9c29 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -81,8 +81,8 @@ public class BigQueryTornadoes {
    * Examines each row in the input table. If a tornado was recorded
    * in that sample, the month in which it occurred is output.
    */
-  static class ExtractTornadoesFn extends OldDoFn<TableRow, Integer> {
-    @Override
+  static class ExtractTornadoesFn extends DoFn<TableRow, Integer> {
+    @ProcessElement
     public void processElement(ProcessContext c){
       TableRow row = c.element();
       if ((Boolean) row.get("tornado")) {
@@ -95,8 +95,8 @@ public class BigQueryTornadoes {
    * Prepares the data for writing to BigQuery by building a TableRow object containing an
    * integer representation of month and the number of tornadoes that occurred in each month.
    */
-  static class FormatCountsFn extends OldDoFn<KV<Integer, Long>, TableRow> {
-    @Override
+  static class FormatCountsFn extends DoFn<KV<Integer, Long>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()
           .set("month", c.element().getKey())

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
index d0bce5d..67918a3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -90,11 +90,11 @@ public class CombinePerKeyExamples {
    * Examines each row in the input table. If the word is greater than or equal to MIN_WORD_LENGTH,
    * outputs word, play_name.
    */
-  static class ExtractLargeWordsFn extends OldDoFn<TableRow, KV<String, String>> {
+  static class ExtractLargeWordsFn extends DoFn<TableRow, KV<String, String>> {
     private final Aggregator<Long, Long> smallerWords =
         createAggregator("smallerWords", new Sum.SumLongFn());
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c){
       TableRow row = c.element();
       String playName = (String) row.get("corpus");
@@ -114,8 +114,8 @@ public class CombinePerKeyExamples {
    * Prepares the data for writing to BigQuery by building a TableRow object
    * containing a word with a string listing the plays in which it appeared.
    */
-  static class FormatShakespeareOutputFn extends OldDoFn<KV<String, String>, TableRow> {
-    @Override
+  static class FormatShakespeareOutputFn extends DoFn<KV<String, String>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()
           .set("word", c.element().getKey())

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
index 1850e89..21220b8 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
@@ -32,8 +32,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 
 import com.google.datastore.v1beta3.Entity;
@@ -79,11 +79,11 @@ import javax.annotation.Nullable;
 public class DatastoreWordCount {
 
   /**
-   * A OldDoFn that gets the content of an entity (one line in a
+   * A {@link DoFn} that gets the content of an entity (one line in a
    * Shakespeare play) and converts it to a string.
    */
-  static class GetContentFn extends OldDoFn<Entity, String> {
-    @Override
+  static class GetContentFn extends DoFn<Entity, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       Map<String, Value> props = c.element().getProperties();
       Value value = props.get("content");
@@ -108,9 +108,9 @@ public class DatastoreWordCount {
   }
 
   /**
-   * A OldDoFn that creates entity for every line in Shakespeare.
+   * A {@link DoFn} that creates an entity for every line in Shakespeare.
    */
-  static class CreateEntityFn extends OldDoFn<String, Entity> {
+  static class CreateEntityFn extends DoFn<String, Entity> {
     private final String namespace;
     private final String kind;
     private final Key ancestorKey;
@@ -140,7 +140,7 @@ public class DatastoreWordCount {
       return entityBuilder.build();
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(makeEntity(c.element()));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
index 06fba77..9a0f7a2 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Mean;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View;
@@ -98,8 +98,8 @@ public class FilterExamples {
    * Examines each row in the input table. Outputs only the subset of the cells this example
    * is interested in-- the mean_temp and year, month, and day-- as a bigquery table row.
    */
-  static class ProjectionFn extends OldDoFn<TableRow, TableRow> {
-    @Override
+  static class ProjectionFn extends DoFn<TableRow, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c){
       TableRow row = c.element();
       // Grab year, month, day, mean_temp from the row
@@ -119,16 +119,16 @@ public class FilterExamples {
    * Implements 'filter' functionality.
    *
    * <p>Examines each row in the input table. Outputs only rows from the month
-   * monthFilter, which is passed in as a parameter during construction of this OldDoFn.
+   * monthFilter, which is passed in as a parameter during construction of this DoFn.
    */
-  static class FilterSingleMonthDataFn extends OldDoFn<TableRow, TableRow> {
+  static class FilterSingleMonthDataFn extends DoFn<TableRow, TableRow> {
     Integer monthFilter;
 
     public FilterSingleMonthDataFn(Integer monthFilter) {
       this.monthFilter = monthFilter;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c){
       TableRow row = c.element();
       Integer month;
@@ -143,8 +143,8 @@ public class FilterExamples {
    * Examines each row (weather reading) in the input table. Output the temperature
    * reading for that row ('mean_temp').
    */
-  static class ExtractTempFn extends OldDoFn<TableRow, Double> {
-    @Override
+  static class ExtractTempFn extends DoFn<TableRow, Double> {
+    @ProcessElement
     public void processElement(ProcessContext c){
       TableRow row = c.element();
       Double meanTemp = Double.parseDouble(row.get("mean_temp").toString());
@@ -191,8 +191,8 @@ public class FilterExamples {
       PCollection<TableRow> filteredRows = monthFilteredRows
           .apply("ParseAndFilter", ParDo
               .withSideInputs(globalMeanTemp)
-              .of(new OldDoFn<TableRow, TableRow>() {
-                @Override
+              .of(new DoFn<TableRow, TableRow>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   Double meanTemp = Double.parseDouble(c.element().get("mean_temp").toString());
                   Double gTemp = c.sideInput(globalMeanTemp);

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
index 5260c0d..5ff2ce2 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -99,8 +99,8 @@ public class JoinExamples {
     // country code 'key' -> string of <event info>, <country name>
     PCollection<KV<String, String>> finalResultCollection =
       kvpCollection.apply("Process", ParDo.of(
-        new OldDoFn<KV<String, CoGbkResult>, KV<String, String>>() {
-          @Override
+        new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             KV<String, CoGbkResult> e = c.element();
             String countryCode = e.getKey();
@@ -116,8 +116,8 @@ public class JoinExamples {
 
     // write to GCS
     PCollection<String> formattedResults = finalResultCollection
-        .apply("Format", ParDo.of(new OldDoFn<KV<String, String>, String>() {
-          @Override
+        .apply("Format", ParDo.of(new DoFn<KV<String, String>, String>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             String outputstring = "Country code: " + c.element().getKey()
                 + ", " + c.element().getValue();
@@ -131,8 +131,8 @@ public class JoinExamples {
    * Examines each row (event) in the input table. Output a KV with the key the country
    * code of the event, and the value a string encoding event information.
    */
-  static class ExtractEventDataFn extends OldDoFn<TableRow, KV<String, String>> {
-    @Override
+  static class ExtractEventDataFn extends DoFn<TableRow, KV<String, String>> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
       String countryCode = (String) row.get("ActionGeo_CountryCode");
@@ -149,8 +149,8 @@ public class JoinExamples {
    * Examines each row (country info) in the input table. Output a KV with the key the country
    * code, and the value the country name.
    */
-  static class ExtractCountryInfoFn extends OldDoFn<TableRow, KV<String, String>> {
-    @Override
+  static class ExtractCountryInfoFn extends DoFn<TableRow, KV<String, String>> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
       String countryCode = (String) row.get("FIPSCC");

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
index 1bcb491..4f266d3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Max;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
@@ -82,8 +82,8 @@ public class MaxPerKeyExamples {
    * Examines each row (weather reading) in the input table. Output the month of the reading,
    * and the mean_temp.
    */
-  static class ExtractTempFn extends OldDoFn<TableRow, KV<Integer, Double>> {
-    @Override
+  static class ExtractTempFn extends DoFn<TableRow, KV<Integer, Double>> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = c.element();
       Integer month = Integer.parseInt((String) row.get("month"));
@@ -96,8 +96,8 @@ public class MaxPerKeyExamples {
    * Format the results to a TableRow, to save to BigQuery.
    *
    */
-  static class FormatMaxesFn extends OldDoFn<KV<Integer, Double>, TableRow> {
-    @Override
+  static class FormatMaxesFn extends DoFn<KV<Integer, Double>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()
           .set("month", c.element().getKey())

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
index 0be9921..04ac2c3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
@@ -28,14 +28,14 @@ import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.AfterEach;
 import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
 import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Repeatedly;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -342,9 +342,9 @@ public class TriggerExample {
           .apply(GroupByKey.<String, Integer>create());
 
       PCollection<KV<String, String>> results = flowPerFreeway.apply(ParDo.of(
-          new OldDoFn<KV<String, Iterable<Integer>>, KV<String, String>>() {
+          new DoFn<KV<String, Iterable<Integer>>, KV<String, String>>() {
 
-            @Override
+            @ProcessElement
             public void processElement(ProcessContext c) throws Exception {
               Iterable<Integer> flows = c.element().getValue();
               Integer sum = 0;
@@ -365,22 +365,21 @@ public class TriggerExample {
    * Format the results of the Total flow calculation to a TableRow, to save to BigQuery.
    * Adds the triggerType, pane information, processing time and the window timestamp.
    * */
-  static class FormatTotalFlow extends OldDoFn<KV<String, String>, TableRow>
-  implements  RequiresWindowAccess {
+  static class FormatTotalFlow extends DoFn<KV<String, String>, TableRow> {
     private String triggerType;
 
     public FormatTotalFlow(String triggerType) {
       this.triggerType = triggerType;
     }
-    @Override
-    public void processElement(ProcessContext c) throws Exception {
+    @ProcessElement
+    public void processElement(ProcessContext c, BoundedWindow window) throws Exception {
       String[] values = c.element().getValue().split(",");
       TableRow row = new TableRow()
           .set("trigger_type", triggerType)
           .set("freeway", c.element().getKey())
           .set("total_flow", Integer.parseInt(values[0]))
           .set("number_of_records", Long.parseLong(values[1]))
-          .set("window", c.window().toString())
+          .set("window", window.toString())
           .set("isFirst", c.pane().isFirst())
           .set("isLast", c.pane().isLast())
           .set("timing", c.pane().getTiming().toString())
@@ -394,8 +393,8 @@ public class TriggerExample {
    * Extract the freeway and total flow in a reading.
    * Freeway is used as key since we are calculating the total flow for each freeway.
    */
-  static class ExtractFlowInfo extends OldDoFn<String, KV<String, Integer>> {
-    @Override
+  static class ExtractFlowInfo extends DoFn<String, KV<String, Integer>> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       String[] laneInfo = c.element().split(",");
       if (laneInfo[0].equals("timestamp")) {
@@ -471,13 +470,13 @@ public class TriggerExample {
    * Add current time to each record.
    * Also insert a delay at random to demo the triggers.
    */
-  public static class InsertDelays extends OldDoFn<String, String> {
+  public static class InsertDelays extends DoFn<String, String> {
     private static final double THRESHOLD = 0.001;
     // MIN_DELAY and MAX_DELAY in minutes.
     private static final int MIN_DELAY = 1;
     private static final int MAX_DELAY = 100;
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       Instant timestamp = Instant.now();
       if (Math.random() < THRESHOLD){

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
index 26bf8fb..9d36a3e 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
@@ -26,6 +26,7 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
 import org.apache.beam.sdk.transforms.MapElements;
 import org.apache.beam.sdk.values.PCollection;
@@ -46,7 +47,7 @@ import java.util.List;
 @RunWith(JUnit4.class)
 public class WordCountTest {
 
-  /** Example test that tests a specific OldDoFn. */
+  /** Example test that tests a specific {@link DoFn}. */
   @Test
   public void testExtractWordsFn() throws Exception {
     DoFnTester<String, String> extractWordsFn =

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
index 6f68ce8..6f28dec 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
@@ -23,8 +23,8 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Filter;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -171,8 +171,8 @@ public class AutoCompleteTest implements Serializable {
       extends PTransform<PCollection<TimestampedValue<T>>, PCollection<T>> {
     @Override
     public PCollection<T> apply(PCollection<TimestampedValue<T>> input) {
-      return input.apply(ParDo.of(new OldDoFn<TimestampedValue<T>, T>() {
-        @Override
+      return input.apply(ParDo.of(new DoFn<TimestampedValue<T>, T>() {
+        @ProcessElement
         public void processElement(ProcessContext c) {
           c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());
         }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
index e72a9e8..fee3c14 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.RunnableOnService;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -141,8 +141,8 @@ public class TriggerExampleTest {
     return Joiner.on(",").join(entries);
   }
 
-  static class FormatResults extends OldDoFn<TableRow, String> {
-    @Override
+  static class FormatResults extends DoFn<TableRow, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       TableRow element = c.element();
       TableRow row = new TableRow()

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
index b1407f6..01ffb1d 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
@@ -27,15 +27,15 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
 import org.apache.beam.sdk.transforms.Mean;
-import org.apache.beam.sdk.transforms.OldDoFn;
-import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.Values;
 import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.transforms.windowing.OutputTimeFns;
@@ -126,10 +126,10 @@ public class GameStats extends LeaderBoard {
           .apply("ProcessAndFilter", ParDo
               // use the derived mean total score as a side input
               .withSideInputs(globalMeanScore)
-              .of(new OldDoFn<KV<String, Integer>, KV<String, Integer>>() {
+              .of(new DoFn<KV<String, Integer>, KV<String, Integer>>() {
                 private final Aggregator<Long, Long> numSpammerUsers =
                   createAggregator("SpammerUsers", new Sum.SumLongFn());
-                @Override
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   Integer score = c.element().getValue();
                   Double gmc = c.sideInput(globalMeanScore);
@@ -149,12 +149,10 @@ public class GameStats extends LeaderBoard {
   /**
    * Calculate and output an element's session duration.
    */
-  private static class UserSessionInfoFn extends OldDoFn<KV<String, Integer>, Integer>
-      implements RequiresWindowAccess {
-
-    @Override
-    public void processElement(ProcessContext c) {
-      IntervalWindow w = (IntervalWindow) c.window();
+  private static class UserSessionInfoFn extends DoFn<KV<String, Integer>, Integer> {
+    @ProcessElement
+    public void processElement(ProcessContext c, BoundedWindow window) {
+      IntervalWindow w = (IntervalWindow) window;
       int duration = new Duration(
           w.start(), w.end()).toPeriod().toStandardMinutes().getMinutes();
       c.output(duration);
@@ -281,8 +279,8 @@ public class GameStats extends LeaderBoard {
       // Filter out the detected spammer users, using the side input derived above.
       .apply("FilterOutSpammers", ParDo
               .withSideInputs(spammersView)
-              .of(new OldDoFn<GameActionInfo, GameActionInfo>() {
-                @Override
+              .of(new DoFn<GameActionInfo, GameActionInfo>() {
+                @ProcessElement
                 public void processElement(ProcessContext c) {
                   // If the user is not in the spammers Map, output the data element.
                   if (c.sideInput(spammersView).get(c.element().getUser().trim()) == null) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
index 00dc8a4..c97eb41 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
@@ -28,8 +28,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -123,14 +123,14 @@ public class UserScore {
    * user2_AsparagusPig,AsparagusPig,10,1445230923951,2015-11-02 09:09:28.224
    * The human-readable time string is not used here.
    */
-  static class ParseEventFn extends OldDoFn<String, GameActionInfo> {
+  static class ParseEventFn extends DoFn<String, GameActionInfo> {
 
     // Log and count parse errors.
     private static final Logger LOG = LoggerFactory.getLogger(ParseEventFn.class);
     private final Aggregator<Long, Long> numParseErrors =
         createAggregator("ParseErrors", new Sum.SumLongFn());
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       String[] components = c.element().split(",");
       try {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/71e027dc/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
index 01efad8..75d371a 100644
--- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
+++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
@@ -83,7 +83,7 @@ public class UserScoreTest implements Serializable {
       KV.of("AndroidGreenKookaburra", 23),
       KV.of("BisqueBilby", 14));
 
-  /** Test the ParseEventFn OldDoFn. */
+  /** Test the {@link ParseEventFn} {@link DoFn}. */
   @Test
   public void testParseEventFn() throws Exception {
     DoFnTester<String, GameActionInfo> parseEventFn =


[13/51] [abbrv] incubator-beam git commit: Port WindowedWordCount example from OldDoFn to DoFn

Posted by ke...@apache.org.
Port WindowedWordCount example from OldDoFn to DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/ca9e3372
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/ca9e3372
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/ca9e3372

Branch: refs/heads/python-sdk
Commit: ca9e337203208c7c5876f0710fb3a45430a5b3a8
Parents: 4ceec0e
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:01 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../org/apache/beam/examples/WindowedWordCount.java   | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ca9e3372/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
index 17f7da3..842cb54 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,14 +103,14 @@ public class WindowedWordCount {
     static final int WINDOW_SIZE = 1;  // Default window duration in minutes
 
   /**
-   * Concept #2: A OldDoFn that sets the data element timestamp. This is a silly method, just for
+   * Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for
    * this example, for the bounded data case.
    *
    * <p>Imagine that many ghosts of Shakespeare are all typing madly at the same time to recreate
    * his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
    * 2-hour period.
    */
-  static class AddTimestampFn extends OldDoFn<String, String> {
+  static class AddTimestampFn extends DoFn<String, String> {
     private static final Duration RAND_RANGE = Duration.standardHours(2);
     private final Instant minTimestamp;
 
@@ -118,7 +118,7 @@ public class WindowedWordCount {
       this.minTimestamp = new Instant(System.currentTimeMillis());
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       // Generate a timestamp that falls somewhere in the past two hours.
       long randMillis = (long) (Math.random() * RAND_RANGE.getMillis());
@@ -130,9 +130,9 @@ public class WindowedWordCount {
     }
   }
 
-  /** A OldDoFn that converts a Word and Count into a BigQuery table row. */
-  static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
-    @Override
+  /** A DoFn that converts a Word and Count into a BigQuery table row. */
+  static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       TableRow row = new TableRow()
           .set("word", c.element().getKey())


[16/51] [abbrv] incubator-beam git commit: Run findbugs in the test-compile phase

Posted by ke...@apache.org.
Run findbugs in the test-compile phase


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/14c6d99e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/14c6d99e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/14c6d99e

Branch: refs/heads/python-sdk
Commit: 14c6d99e087b2e1606422821341136a5d5e8ec23
Parents: 9a329aa
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 21:31:17 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Wed Aug 3 21:31:17 2016 -0700

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/14c6d99e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 23b7e4d..9e58ffe 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1029,7 +1029,7 @@
 
           <executions>
             <execution>
-              <phase>test</phase>
+              <phase>test-compile</phase>
               <goals>
                 <goal>check</goal>
               </goals>


[41/51] [abbrv] incubator-beam git commit: Port easy I/O transforms to new DoFn

Posted by ke...@apache.org.
Port easy I/O transforms to new DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/269fbf38
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/269fbf38
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/269fbf38

Branch: refs/heads/python-sdk
Commit: 269fbf386454ea77845e54764a125edba7039b03
Parents: ef5e31f
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 20:22:26 2016 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Thu Aug 4 14:56:42 2016 -0700

----------------------------------------------------------------------
 .../beam/runners/dataflow/DataflowRunner.java   |  3 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   | 14 ++++----
 .../apache/beam/sdk/io/PubsubUnboundedSink.java | 17 +++++----
 .../beam/sdk/io/PubsubUnboundedSource.java      |  7 ++--
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 36 +++++++++-----------
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java    | 12 +++----
 .../beam/sdk/io/gcp/datastore/V1Beta3.java      | 18 +++++-----
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 10 +++---
 .../sdk/io/gcp/bigtable/BigtableWriteIT.java    |  6 ++--
 .../sdk/io/gcp/datastore/V1Beta3TestUtil.java   |  9 +++--
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  | 10 +++---
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   | 19 +++++------
 .../apache/beam/sdk/io/kafka/KafkaIOTest.java   | 10 +++---
 13 files changed, 82 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index abcf415..fadd9c7 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -78,6 +78,7 @@ import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.OldDoFn;
@@ -2715,7 +2716,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     @Nullable
     private PTransform<?, ?> transform;
     @Nullable
-    private OldDoFn<?, ?> doFn;
+    private DoFn<?, ?> doFn;
 
     /**
      * Builds an instance of this class from the overridden transform.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
index 1902bca..2b27175 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -709,11 +709,11 @@ public class PubsubIO {
        *
        * <p>Public so can be suppressed by runners.
        */
-      public class PubsubBoundedReader extends OldDoFn<Void, T> {
+      public class PubsubBoundedReader extends DoFn<Void, T> {
         private static final int DEFAULT_PULL_SIZE = 100;
         private static final int ACK_TIMEOUT_SEC = 60;
 
-        @Override
+        @ProcessElement
         public void processElement(ProcessContext c) throws IOException {
           try (PubsubClient pubsubClient =
                    FACTORY.newClient(timestampLabel, idLabel,
@@ -998,12 +998,12 @@ public class PubsubIO {
        *
        * <p>Public so can be suppressed by runners.
        */
-      public class PubsubBoundedWriter extends OldDoFn<T, Void> {
+      public class PubsubBoundedWriter extends DoFn<T, Void> {
         private static final int MAX_PUBLISH_BATCH_SIZE = 100;
         private transient List<OutgoingMessage> output;
         private transient PubsubClient pubsubClient;
 
-        @Override
+        @StartBundle
         public void startBundle(Context c) throws IOException {
           this.output = new ArrayList<>();
           // NOTE: idLabel is ignored.
@@ -1012,7 +1012,7 @@ public class PubsubIO {
                                 c.getPipelineOptions().as(PubsubOptions.class));
         }
 
-        @Override
+        @ProcessElement
         public void processElement(ProcessContext c) throws IOException {
           // NOTE: The record id is always null.
           OutgoingMessage message =
@@ -1025,7 +1025,7 @@ public class PubsubIO {
           }
         }
 
-        @Override
+        @FinishBundle
         public void finishBundle(Context c) throws IOException {
           if (!output.isEmpty()) {
             publish();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
index 9e9536d..3014751 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
@@ -31,8 +31,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -65,7 +65,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.ThreadLocalRandom;
-
 import javax.annotation.Nullable;
 
 /**
@@ -78,7 +77,7 @@ import javax.annotation.Nullable;
  * <li>We try to send messages in batches while also limiting send latency.
  * <li>No stats are logged. Rather some counters are used to keep track of elements and batches.
  * <li>Though some background threads are used by the underlying netty system all actual Pubsub
- * calls are blocking. We rely on the underlying runner to allow multiple {@link OldDoFn} instances
+ * calls are blocking. We rely on the underlying runner to allow multiple {@link DoFn} instances
  * to execute concurrently and hide latency.
  * <li>A failed bundle will cause messages to be resent. Thus we rely on the Pubsub consumer
  * to dedup messages.
@@ -155,7 +154,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
   /**
    * Convert elements to messages and shard them.
    */
-  private static class ShardFn<T> extends OldDoFn<T, KV<Integer, OutgoingMessage>> {
+  private static class ShardFn<T> extends DoFn<T, KV<Integer, OutgoingMessage>> {
     private final Aggregator<Long, Long> elementCounter =
         createAggregator("elements", new Sum.SumLongFn());
     private final Coder<T> elementCoder;
@@ -168,7 +167,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
       this.recordIdMethod = recordIdMethod;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       elementCounter.addValue(1L);
       byte[] elementBytes = CoderUtils.encodeToByteArray(elementCoder, c.element());
@@ -207,7 +206,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
    * Publish messages to Pubsub in batches.
    */
   private static class WriterFn
-      extends OldDoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
+      extends DoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
     private final PubsubClientFactory pubsubFactory;
     private final TopicPath topic;
     private final String timestampLabel;
@@ -253,14 +252,14 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
       byteCounter.addValue((long) bytes);
     }
 
-    @Override
+    @StartBundle
     public void startBundle(Context c) throws Exception {
       checkState(pubsubClient == null, "startBundle invoked without prior finishBundle");
       pubsubClient = pubsubFactory.newClient(timestampLabel, idLabel,
                                              c.getPipelineOptions().as(PubsubOptions.class));
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       List<OutgoingMessage> pubsubMessages = new ArrayList<>(publishBatchSize);
       int bytes = 0;
@@ -285,7 +284,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
       }
     }
 
-    @Override
+    @FinishBundle
     public void finishBundle(Context c) throws Exception {
       pubsubClient.close();
       pubsubClient = null;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
index d98bd6a..f99b471 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PubsubOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
@@ -77,7 +77,6 @@ import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicInteger;
-
 import javax.annotation.Nullable;
 
 /**
@@ -1107,7 +1106,7 @@ public class PubsubUnboundedSource<T> extends PTransform<PBegin, PCollection<T>>
   // StatsFn
   // ================================================================================
 
-  private static class StatsFn<T> extends OldDoFn<T, T> {
+  private static class StatsFn<T> extends DoFn<T, T> {
     private final Aggregator<Long, Long> elementCounter =
         createAggregator("elements", new Sum.SumLongFn());
 
@@ -1131,7 +1130,7 @@ public class PubsubUnboundedSource<T> extends PTransform<PBegin, PCollection<T>>
       this.idLabel = idLabel;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       elementCounter.addValue(1L);
       c.output(c.element());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 2ba7562..ed2c32e 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -42,6 +42,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -103,7 +104,6 @@ import com.google.common.io.CountingOutputStream;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.apache.avro.generic.GenericRecord;
 import org.joda.time.Instant;
 import org.slf4j.Logger;
@@ -135,7 +135,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-
 import javax.annotation.Nullable;
 
 /**
@@ -334,7 +333,7 @@ public class BigQueryIO {
    * <p>Each {@link TableRow} contains values indexed by column name. Here is a
    * sample processing function that processes a "line" column from rows:
    * <pre>{@code
-   * static class ExtractWordsFn extends OldDoFn<TableRow, String> {
+   * static class ExtractWordsFn extends DoFn<TableRow, String> {
    *   public void processElement(ProcessContext c) {
    *     // Get the "line" field of the TableRow object, split it into words, and emit them.
    *     TableRow row = c.element();
@@ -706,8 +705,8 @@ public class BigQueryIO {
       input.getPipeline()
           .apply("Create(CleanupOperation)", Create.of(cleanupOperation))
           .apply("Cleanup", ParDo.of(
-              new OldDoFn<CleanupOperation, Void>() {
-                @Override
+              new DoFn<CleanupOperation, Void>() {
+                @ProcessElement
                 public void processElement(ProcessContext c)
                     throws Exception {
                   c.element().cleanup(c.getPipelineOptions());
@@ -717,8 +716,8 @@ public class BigQueryIO {
       return outputs.get(mainOutput);
     }
 
-    private static class IdentityFn<T> extends OldDoFn<T, T> {
-      @Override
+    private static class IdentityFn<T> extends DoFn<T, T> {
+      @ProcessElement
       public void processElement(ProcessContext c) {
         c.output(c.element());
       }
@@ -1271,7 +1270,7 @@ public class BigQueryIO {
    * <p>Here is a sample transform that produces TableRow values containing
    * "word" and "count" columns:
    * <pre>{@code
-   * static class FormatCountsFn extends OldDoFn<KV<String, Long>, TableRow> {
+   * static class FormatCountsFn extends DoFn<KV<String, Long>, TableRow> {
    *   public void processElement(ProcessContext c) {
    *     TableRow row = new TableRow()
    *         .set("word", c.element().getKey())
@@ -2307,11 +2306,11 @@ public class BigQueryIO {
   /////////////////////////////////////////////////////////////////////////////
 
   /**
-   * Implementation of OldDoFn to perform streaming BigQuery write.
+   * Implementation of DoFn to perform streaming BigQuery write.
    */
   @SystemDoFnInternal
   private static class StreamingWriteFn
-      extends OldDoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
+      extends DoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
     /** TableSchema in JSON. Use String to make the class Serializable. */
     private final String jsonTableSchema;
 
@@ -2339,14 +2338,14 @@ public class BigQueryIO {
     }
 
     /** Prepares a target BigQuery table. */
-    @Override
+    @StartBundle
     public void startBundle(Context context) {
       tableRows = new HashMap<>();
       uniqueIdsForTableRows = new HashMap<>();
     }
 
     /** Accumulates the input into JsonTableRows and uniqueIdsForTableRows. */
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext context) {
       String tableSpec = context.element().getKey().getKey();
       List<TableRow> rows = getOrCreateMapListValue(tableRows, tableSpec);
@@ -2357,7 +2356,7 @@ public class BigQueryIO {
     }
 
     /** Writes the accumulated rows into BigQuery with streaming API. */
-    @Override
+    @FinishBundle
     public void finishBundle(Context context) throws Exception {
       BigQueryOptions options = context.getPipelineOptions().as(BigQueryOptions.class);
 
@@ -2544,8 +2543,7 @@ public class BigQueryIO {
    * id is created by concatenating this randomUUID with a sequential number.
    */
   private static class TagWithUniqueIdsAndTable
-      extends OldDoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>>
-      implements OldDoFn.RequiresWindowAccess {
+      extends DoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>> {
     /** TableSpec to write to. */
     private final String tableSpec;
 
@@ -2571,18 +2569,18 @@ public class BigQueryIO {
     }
 
 
-    @Override
+    @StartBundle
     public void startBundle(Context context) {
       randomUUID = UUID.randomUUID().toString();
     }
 
     /** Tag the input with a unique id. */
-    @Override
-    public void processElement(ProcessContext context) throws IOException {
+    @ProcessElement
+    public void processElement(ProcessContext context, BoundedWindow window) throws IOException {
       String uniqueId = randomUUID + sequenceNo++;
       ThreadLocalRandom randomGenerator = ThreadLocalRandom.current();
       String tableSpec = tableSpecFromWindow(
-          context.getPipelineOptions().as(BigQueryOptions.class), context.window());
+          context.getPipelineOptions().as(BigQueryOptions.class), window);
       // We output on keys 0-50 to ensure that there's enough batching for
       // BigQuery.
       context.output(KV.of(ShardedKey.of(tableSpec, randomGenerator.nextInt(0, 50)),

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
index 1f77e3e..bfdf4aa 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.io.range.ByteKeyRange;
 import org.apache.beam.sdk.io.range.ByteKeyRangeTracker;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -55,7 +55,6 @@ import com.google.common.util.concurrent.Futures;
 import com.google.protobuf.ByteString;
 
 import io.grpc.Status;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,7 +64,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.concurrent.ConcurrentLinkedQueue;
-
 import javax.annotation.Nullable;
 
 /**
@@ -512,7 +510,7 @@ public class BigtableIO {
       return new BigtableServiceImpl(options);
     }
 
-    private class BigtableWriterFn extends OldDoFn<KV<ByteString, Iterable<Mutation>>, Void> {
+    private class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, Void> {
 
       public BigtableWriterFn(String tableId, BigtableService bigtableService) {
         this.tableId = checkNotNull(tableId, "tableId");
@@ -520,13 +518,13 @@ public class BigtableIO {
         this.failures = new ConcurrentLinkedQueue<>();
       }
 
-      @Override
+      @StartBundle
       public void startBundle(Context c) throws Exception {
         bigtableWriter = bigtableService.openForWriting(tableId);
         recordsWritten = 0;
       }
 
-      @Override
+      @ProcessElement
       public void processElement(ProcessContext c) throws Exception {
         checkForFailures();
         Futures.addCallback(
@@ -534,7 +532,7 @@ public class BigtableIO {
         ++recordsWritten;
       }
 
-      @Override
+      @FinishBundle
       public void finishBundle(Context c) throws Exception {
         bigtableWriter.close();
         bigtableWriter = null;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
index 6f3663a..052feb3 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
@@ -37,9 +37,9 @@ import org.apache.beam.sdk.io.Sink.Writer;
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Values;
@@ -478,11 +478,11 @@ public class V1Beta3 {
     }
 
     /**
-     * A {@link OldDoFn} that splits a given query into multiple sub-queries, assigns them unique
+     * A {@link DoFn} that splits a given query into multiple sub-queries, assigns them unique
      * keys and outputs them as {@link KV}.
      */
     @VisibleForTesting
-    static class SplitQueryFn extends OldDoFn<Query, KV<Integer, Query>> {
+    static class SplitQueryFn extends DoFn<Query, KV<Integer, Query>> {
       private final V1Beta3Options options;
       // number of splits to make for a given query
       private final int numSplits;
@@ -505,13 +505,13 @@ public class V1Beta3 {
         this.datastoreFactory = datastoreFactory;
       }
 
-      @Override
+      @StartBundle
       public void startBundle(Context c) throws Exception {
         datastore = datastoreFactory.getDatastore(c.getPipelineOptions(), options.projectId);
         querySplitter = datastoreFactory.getQuerySplitter();
       }
 
-      @Override
+      @ProcessElement
       public void processElement(ProcessContext c) throws Exception {
         int key = 1;
         Query query = c.element();
@@ -559,10 +559,10 @@ public class V1Beta3 {
     }
 
     /**
-     * A {@link OldDoFn} that reads entities from Datastore for each query.
+     * A {@link DoFn} that reads entities from Datastore for each query.
      */
     @VisibleForTesting
-    static class ReadFn extends OldDoFn<Query, Entity> {
+    static class ReadFn extends DoFn<Query, Entity> {
       private final V1Beta3Options options;
       private final V1Beta3DatastoreFactory datastoreFactory;
       // Datastore client
@@ -578,13 +578,13 @@ public class V1Beta3 {
         this.datastoreFactory = datastoreFactory;
       }
 
-      @Override
+      @StartBundle
       public void startBundle(Context c) throws Exception {
         datastore = datastoreFactory.getDatastore(c.getPipelineOptions(), options.getProjectId());
       }
 
       /** Read and output entities for the given query. */
-      @Override
+      @ProcessElement
       public void processElement(ProcessContext context) throws Exception {
         Query query = context.element();
         String namespace = options.getNamespace();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
index 1ea1f94..6d6eb60 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.toJsonString;
 import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
 
 import static com.google.common.base.Preconditions.checkArgument;
+
 import static org.hamcrest.Matchers.hasItem;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
@@ -64,8 +65,8 @@ import org.apache.beam.sdk.testing.SourceTestUtils;
 import org.apache.beam.sdk.testing.SourceTestUtils.ExpectedSplitOutcome;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -131,7 +132,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
-
 import javax.annotation.Nullable;
 
 /**
@@ -235,7 +235,7 @@ public class BigQueryIOTest implements Serializable {
     private Object[] pollJobReturns;
     private String executingProject;
     // Both counts will be reset back to zeros after serialization.
-    // This is a work around for OldDoFn's verifyUnmodified check.
+    // This is a work around for DoFn's verifyUnmodified check.
     private transient int startJobCallsCount;
     private transient int pollJobStatusCallsCount;
 
@@ -571,8 +571,8 @@ public class BigQueryIOTest implements Serializable {
         .apply(BigQueryIO.Read.from("non-executing-project:somedataset.sometable")
             .withTestServices(fakeBqServices)
             .withoutValidation())
-        .apply(ParDo.of(new OldDoFn<TableRow, String>() {
-          @Override
+        .apply(ParDo.of(new DoFn<TableRow, String>() {
+          @ProcessElement
           public void processElement(ProcessContext c) throws Exception {
             c.output((String) c.element().get("name"));
           }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
index 83489a5..ee3a6f9 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.CountingInput;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.KV;
 
@@ -108,8 +108,8 @@ public class BigtableWriteIT implements Serializable {
 
     Pipeline p = Pipeline.create(options);
     p.apply(CountingInput.upTo(numRows))
-        .apply(ParDo.of(new OldDoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
-          @Override
+        .apply(ParDo.of(new DoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
+          @ProcessElement
           public void processElement(ProcessContext c) {
             int index = c.element().intValue();
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
index daed1cb..7eaf23e 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
@@ -27,7 +27,7 @@ import static com.google.datastore.v1beta3.client.DatastoreHelper.makeValue;
 
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff;
 import org.apache.beam.sdk.util.RetryHttpRequestInitializer;
 
@@ -60,7 +60,6 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.UUID;
-
 import javax.annotation.Nullable;
 
 class V1Beta3TestUtil {
@@ -109,9 +108,9 @@ class V1Beta3TestUtil {
   }
 
   /**
-   * A OldDoFn that creates entity for a long number.
+   * A DoFn that creates entity for a long number.
    */
-  static class CreateEntityFn extends OldDoFn<Long, Entity> {
+  static class CreateEntityFn extends DoFn<Long, Entity> {
     private final String kind;
     @Nullable
     private final String namespace;
@@ -124,7 +123,7 @@ class V1Beta3TestUtil {
       ancestorKey = makeAncestorKey(namespace, kind, ancestor);
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(makeEntity(c.element(), ancestorKey, kind, namespace));
     }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index eeb02e6..557fe13 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
 import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -453,7 +453,7 @@ public class JmsIO {
       checkArgument((queue != null || topic != null), "Either queue or topic is required");
     }
 
-    private static class JmsWriter extends OldDoFn<String, Void> {
+    private static class JmsWriter extends DoFn<String, Void> {
 
       private ConnectionFactory connectionFactory;
       private String queue;
@@ -469,7 +469,7 @@ public class JmsIO {
         this.topic = topic;
       }
 
-      @Override
+      @StartBundle
       public void startBundle(Context c) throws Exception {
         if (producer == null) {
           this.connection = connectionFactory.createConnection();
@@ -486,7 +486,7 @@ public class JmsIO {
         }
       }
 
-      @Override
+      @ProcessElement
       public void processElement(ProcessContext ctx) throws Exception {
         String value = ctx.element();
 
@@ -499,7 +499,7 @@ public class JmsIO {
         }
       }
 
-      @Override
+      @FinishBundle
       public void finishBundle(Context c) throws Exception {
         producer.close();
         producer = null;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 2271216..2383105 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -33,7 +33,7 @@ import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
 import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
 import org.apache.beam.sdk.io.kafka.KafkaCheckpointMark.PartitionMark;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -94,7 +94,6 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
-
 import javax.annotation.Nullable;
 
 /**
@@ -550,8 +549,8 @@ public class KafkaIO {
       return typedRead
           .apply(begin)
           .apply("Remove Kafka Metadata",
-              ParDo.of(new OldDoFn<KafkaRecord<K, V>, KV<K, V>>() {
-                @Override
+              ParDo.of(new DoFn<KafkaRecord<K, V>, KV<K, V>>() {
+                @ProcessElement
                 public void processElement(ProcessContext ctx) {
                   ctx.output(ctx.element().getKV());
                 }
@@ -1315,8 +1314,8 @@ public class KafkaIO {
     public PDone apply(PCollection<V> input) {
       return input
         .apply("Kafka values with default key",
-          ParDo.of(new OldDoFn<V, KV<Void, V>>() {
-            @Override
+          ParDo.of(new DoFn<V, KV<Void, V>>() {
+            @ProcessElement
             public void processElement(ProcessContext ctx) throws Exception {
               ctx.output(KV.<Void, V>of(null, ctx.element()));
             }
@@ -1326,9 +1325,9 @@ public class KafkaIO {
     }
   }
 
-  private static class KafkaWriter<K, V> extends OldDoFn<KV<K, V>, Void> {
+  private static class KafkaWriter<K, V> extends DoFn<KV<K, V>, Void> {
 
-    @Override
+    @StartBundle
     public void startBundle(Context c) throws Exception {
       // Producer initialization is fairly costly. Move this to future initialization api to avoid
       // creating a producer for each bundle.
@@ -1341,7 +1340,7 @@ public class KafkaIO {
       }
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext ctx) throws Exception {
       checkForFailures();
 
@@ -1351,7 +1350,7 @@ public class KafkaIO {
           new SendCallback());
     }
 
-    @Override
+    @FinishBundle
     public void finishBundle(Context c) throws Exception {
       producer.flush();
       producer.close();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/269fbf38/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
index d7b1921..9a89c36 100644
--- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
+++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
@@ -33,10 +33,10 @@ import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Max;
 import org.apache.beam.sdk.transforms.Min;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.RemoveDuplicates;
 import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -280,8 +280,8 @@ public class KafkaIOTest {
     p.run();
   }
 
-  private static class ElementValueDiff extends OldDoFn<Long, Long> {
-    @Override
+  private static class ElementValueDiff extends DoFn<Long, Long> {
+    @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       c.output(c.element() - c.timestamp().getMillis());
     }
@@ -308,8 +308,8 @@ public class KafkaIOTest {
     p.run();
   }
 
-  private static class RemoveKafkaMetadata<K, V> extends OldDoFn<KafkaRecord<K, V>, KV<K, V>> {
-    @Override
+  private static class RemoveKafkaMetadata<K, V> extends DoFn<KafkaRecord<K, V>, KV<K, V>> {
+    @ProcessElement
     public void processElement(ProcessContext ctx) throws Exception {
       ctx.output(ctx.element().getKV());
     }



[45/51] [abbrv] incubator-beam git commit: Remove References to Instant#now in the DirectRunner

Posted by ke...@apache.org.
Remove References to Instant#now in the DirectRunner

The DirectRunner should use exclusively the configured clock to
determine the processing time.


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/7585cfc3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/7585cfc3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/7585cfc3

Branch: refs/heads/python-sdk
Commit: 7585cfc3693800b00c4ccc799c27f0311e9b0cc1
Parents: fcf6b1d
Author: Thomas Groh <tg...@google.com>
Authored: Fri Aug 5 09:58:05 2016 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Aug 5 10:04:21 2016 -0700

----------------------------------------------------------------------
 .../apache/beam/runners/direct/EvaluationContext.java | 14 ++++++++++----
 .../direct/ExecutorServiceParallelExecutor.java       |  5 ++---
 2 files changed, 12 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/7585cfc3/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java
index 23c139d..94f28e2 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java
@@ -48,6 +48,8 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.util.concurrent.MoreExecutors;
 
+import org.joda.time.Instant;
+
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
@@ -81,6 +83,7 @@ class EvaluationContext {
 
   /** The options that were used to create this {@link Pipeline}. */
   private final DirectOptions options;
+  private final Clock clock;
 
   private final BundleFactory bundleFactory;
   /** The current processing time and event time watermarks and timers. */
@@ -116,6 +119,7 @@ class EvaluationContext {
       Map<AppliedPTransform<?, ?, ?>, String> stepNames,
       Collection<PCollectionView<?>> views) {
     this.options = checkNotNull(options);
+    this.clock = options.getClock();
     this.bundleFactory = checkNotNull(bundleFactory);
     checkNotNull(rootTransforms);
     checkNotNull(valueToConsumers);
@@ -123,9 +127,7 @@ class EvaluationContext {
     checkNotNull(views);
     this.stepNames = stepNames;
 
-    this.watermarkManager =
-        WatermarkManager.create(
-            NanosOffsetClock.create(), rootTransforms, valueToConsumers);
+    this.watermarkManager = WatermarkManager.create(clock, rootTransforms, valueToConsumers);
     this.sideInputContainer = SideInputContainer.create(this, views);
 
     this.applicationStateInternals = new ConcurrentHashMap<>();
@@ -314,7 +316,7 @@ class EvaluationContext {
       AppliedPTransform<?, ?, ?> application, StructuralKey<?> key) {
     StepAndKey stepAndKey = StepAndKey.of(application, key);
     return new DirectExecutionContext(
-        options.getClock(),
+        clock,
         key,
         (CopyOnAccessInMemoryStateInternals<Object>) applicationStateInternals.get(stepAndKey),
         watermarkManager.getWatermarks(application));
@@ -427,4 +429,8 @@ class EvaluationContext {
     }
     return true;
   }
+
+  public Instant now() {
+    return clock.now();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/7585cfc3/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
index 64836d8..a0a5ec0 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java
@@ -40,7 +40,6 @@ import com.google.common.cache.LoadingCache;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 
-import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -433,9 +432,9 @@ final class ExecutorServiceParallelExecutor implements PipelineExecutor {
                       .createKeyedBundle(
                           null, keyTimers.getKey(), (PCollection) transform.getInput())
                       .add(WindowedValue.valueInEmptyWindows(work))
-                      .commit(Instant.now());
-              state.set(ExecutorState.ACTIVE);
+                      .commit(evaluationContext.now());
               scheduleConsumption(transform, bundle, new TimerIterableCompletionCallback(delivery));
+              state.set(ExecutorState.ACTIVE);
             }
           }
         }


[30/51] [abbrv] incubator-beam git commit: [BEAM-124] Flink Running WordCountIT Example

Posted by ke...@apache.org.
[BEAM-124] Flink Running WordCountIT Example


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/d7a02a17
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/d7a02a17
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/d7a02a17

Branch: refs/heads/python-sdk
Commit: d7a02a17b494b6ca3ffa5ce10ef1c3572738a875
Parents: be2758c
Author: Mark Liu <ma...@markliu0.mtv.corp.google.com>
Authored: Tue Aug 2 11:19:03 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:50:08 2016 -0700

----------------------------------------------------------------------
 examples/java/pom.xml | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d7a02a17/examples/java/pom.xml
----------------------------------------------------------------------
diff --git a/examples/java/pom.xml b/examples/java/pom.xml
index d0a6b34..dca2318 100644
--- a/examples/java/pom.xml
+++ b/examples/java/pom.xml
@@ -275,7 +275,6 @@
       <artifactId>beam-runners-direct-java</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
-      <optional>true</optional>
     </dependency>
 
     <dependency>
@@ -285,6 +284,13 @@
     </dependency>
 
     <dependency>
+      <groupId>org.apache.beam</groupId>
+      <artifactId>beam-runners-flink_2.10</artifactId>
+      <version>${project.version}</version>
+      <scope>runtime</scope>
+    </dependency>
+
+    <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-jdk14</artifactId>
       <scope>runtime</scope>


[20/51] [abbrv] incubator-beam git commit: [BEAM-475] Javadoc has invalid references after the recent refactorings + other fixes

Posted by ke...@apache.org.
[BEAM-475] Javadoc has invalid references after the recent refactorings + other fixes

* Remove useless semicolons
* Remove unneeded java keywords/validations and fix Filter<T> style
* Fix invalid Javadoc references and some other documentation issues
* Add rules for unused semicolons and overcomplicated boolean expressions


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/032e1fa6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/032e1fa6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/032e1fa6

Branch: refs/heads/python-sdk
Commit: 032e1fa6b11a7474057df8ef553a5ba2d6cad63d
Parents: 3144363
Author: Ismaël Mejía <ie...@gmail.com>
Authored: Wed Aug 3 11:03:43 2016 +0200
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 23:03:30 2016 -0700

----------------------------------------------------------------------
 .../src/main/resources/beam/checkstyle.xml      |  5 +++++
 .../main/java/org/apache/beam/sdk/Pipeline.java |  2 +-
 .../org/apache/beam/sdk/coders/JAXBCoder.java   |  2 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |  3 +--
 .../org/apache/beam/sdk/io/FileBasedSink.java   |  2 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |  6 +++---
 .../org/apache/beam/sdk/io/package-info.java    |  3 +--
 .../beam/sdk/options/PipelineOptions.java       |  2 +-
 .../sdk/options/PipelineOptionsFactory.java     |  2 +-
 .../beam/sdk/testing/SerializableMatchers.java  | 22 +++++++++++---------
 .../apache/beam/sdk/transforms/CombineFns.java  |  2 +-
 .../beam/sdk/transforms/DoFnReflector.java      |  4 ++--
 .../apache/beam/sdk/transforms/DoFnTester.java  |  2 +-
 .../org/apache/beam/sdk/transforms/Filter.java  |  5 ++---
 .../beam/sdk/transforms/SimpleFunction.java     |  2 +-
 .../windowing/AfterProcessingTime.java          |  5 +++--
 .../beam/sdk/transforms/windowing/PaneInfo.java |  2 +-
 .../beam/sdk/transforms/windowing/Window.java   |  2 +-
 .../org/apache/beam/sdk/util/TimeDomain.java    |  2 +-
 .../apache/beam/sdk/util/WindowingStrategy.java |  6 +++---
 .../util/common/ElementByteSizeObserver.java    |  2 +-
 .../CopyOnAccessInMemoryStateInternals.java     |  4 ++--
 .../sdk/util/state/StateInternalsFactory.java   |  4 ++--
 .../beam/sdk/util/state/StateNamespaces.java    |  2 +-
 .../apache/beam/sdk/util/state/StateTags.java   |  6 +++---
 .../apache/beam/sdk/values/PCollectionList.java |  2 +-
 .../org/apache/beam/sdk/values/POutput.java     |  2 +-
 .../org/apache/beam/sdk/values/TupleTag.java    |  2 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |  1 -
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |  4 +---
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |  2 +-
 31 files changed, 57 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/build-tools/src/main/resources/beam/checkstyle.xml
----------------------------------------------------------------------
diff --git a/sdks/java/build-tools/src/main/resources/beam/checkstyle.xml b/sdks/java/build-tools/src/main/resources/beam/checkstyle.xml
index a437561..4bb7428 100644
--- a/sdks/java/build-tools/src/main/resources/beam/checkstyle.xml
+++ b/sdks/java/build-tools/src/main/resources/beam/checkstyle.xml
@@ -316,6 +316,11 @@ page at http://checkstyle.sourceforge.net/config.html -->
       <property name="severity" value="error"/>
     </module>
 
+    <!-- Checks for over-complicated boolean expressions. -->
+    <module name="SimplifyBooleanExpression"/>
+
+    <!-- Detects empty statements (standalone ";" semicolon). -->
+    <module name="EmptyStatement"/>
 
     <!--
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
index 31ae2dc..e4f3e4a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
@@ -250,7 +250,7 @@ public class Pipeline {
      */
     public enum CompositeBehavior {
       ENTER_TRANSFORM,
-      DO_NOT_ENTER_TRANSFORM;
+      DO_NOT_ENTER_TRANSFORM
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java
index f90eb54..2284aaf 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java
@@ -110,7 +110,7 @@ public class JAXBCoder<T> extends AtomicCoder<T> {
     }
   }
 
-  private final JAXBContext getContext() throws JAXBException {
+  private JAXBContext getContext() throws JAXBException {
     if (jaxbContext == null) {
       synchronized (this) {
         if (jaxbContext == null) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
index 718461a..28c283d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
@@ -87,8 +87,7 @@ import javax.annotation.Nullable;
  * the path of the file to write to (e.g., a local filename or sharded
  * filename pattern if running locally, or a Google Cloud Storage
  * filename or sharded filename pattern of the form
- * {@code "gs://<bucket>/<filepath>"}), and optionally
- * {@link AvroIO.Write#named} to specify the name of the pipeline step.
+ * {@code "gs://<bucket>/<filepath>"}).
  *
  * <p>It is required to specify {@link AvroIO.Write#withSchema}. To
  * write specific records, such as Avro-generated classes, provide an

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java
index 8246148..761d49c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java
@@ -207,7 +207,7 @@ public abstract class FileBasedSink<T> extends Sink<T> {
      */
     public enum TemporaryFileRetention {
       KEEP,
-      REMOVE;
+      REMOVE
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
index 182fa1f..1902bca 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
@@ -365,9 +365,9 @@ public class PubsubIO {
    * the stream.
    *
    * <p>When running with a {@link PipelineRunner} that only supports bounded
-   * {@link PCollection PCollections} (such as {@link DirectRunner}),
-   * only a bounded portion of the input Pub/Sub stream can be processed. As such, either
-   * {@link Bound#maxNumRecords(int)} or {@link Bound#maxReadTime(Duration)} must be set.
+   * {@link PCollection PCollections}, only a bounded portion of the input Pub/Sub stream
+   * can be processed. As such, either {@link Bound#maxNumRecords(int)} or
+   * {@link Bound#maxReadTime(Duration)} must be set.
    */
   public static class Read {
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
index 432c5df..c4ff158 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/package-info.java
@@ -17,8 +17,7 @@
  */
 /**
  * Defines transforms for reading and writing common storage formats, including
- * {@link org.apache.beam.sdk.io.AvroIO},
- * {@link org.apache.beam.sdk.io.BigQueryIO}, and
+ * {@link org.apache.beam.sdk.io.AvroIO}, and
  * {@link org.apache.beam.sdk.io.TextIO}.
  *
  * <p>The classes in this package provide {@code Read} transforms that create PCollections

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
index aa9f13e..365f668 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
@@ -234,7 +234,7 @@ public interface PipelineOptions extends HasDisplayData {
   public static enum CheckEnabled {
     OFF,
     WARNING,
-    ERROR;
+    ERROR
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
index 67fa2af..a795fcd 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
@@ -920,7 +920,7 @@ public class PipelineOptionsFactory {
           mismatch.setterPropertyType.getName()));
     } else if (mismatches.size() > 1) {
       StringBuilder builder = new StringBuilder(
-          String.format("Type mismatches between getters and setters detected:"));
+          "Type mismatches between getters and setters detected:");
       for (TypeMismatch mismatch : mismatches) {
         builder.append(String.format(
             "%n  - Property [%s]: Getter is of type [%s] whereas setter is of type [%s].",

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
index 4e0c0be..62a42e4 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
@@ -516,7 +516,7 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#equalTo()}.
+   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#equalTo(Object)}.
    */
   public static <T extends Serializable> SerializableMatcher<T> equalTo(final T expected) {
     return fromSupplier(new SerializableSupplier<Matcher<T>>() {
@@ -528,7 +528,7 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#equalTo()}.
+   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#equalTo(Object)}.
    *
    * <p>The expected value of type {@code T} will be serialized using the provided {@link Coder}.
    * It is explicitly <i>not</i> required or expected to be serializable via Java serialization.
@@ -546,7 +546,8 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#greaterThan()}.
+   * A {@link SerializableMatcher} with identical criteria to
+   * {@link Matchers#greaterThan(Comparable)}.
    */
   public static <T extends Comparable<T> & Serializable> SerializableMatcher<T>
   greaterThan(final T target) {
@@ -559,7 +560,8 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#greaterThan()}.
+   * A {@link SerializableMatcher} with identical criteria to
+   * {@link Matchers#greaterThan(Comparable)}.
    *
    * <p>The target value of type {@code T} will be serialized using the provided {@link Coder}.
    * It is explicitly <i>not</i> required or expected to be serializable via Java serialization.
@@ -577,7 +579,7 @@ class SerializableMatchers implements Serializable {
 
   /**
    * A {@link SerializableMatcher} with identical criteria to
-   * {@link Matchers#greaterThanOrEqualTo()}.
+   * {@link Matchers#greaterThanOrEqualTo(Comparable)}.
    */
   public static <T extends Comparable<T>> SerializableMatcher<T> greaterThanOrEqualTo(
       final T target) {
@@ -591,7 +593,7 @@ class SerializableMatchers implements Serializable {
 
   /**
    * A {@link SerializableMatcher} with identical criteria to
-   * {@link Matchers#greaterThanOrEqualTo()}.
+   * {@link Matchers#greaterThanOrEqualTo(Comparable)}.
    *
    * <p>The target value of type {@code T} will be serialized using the provided {@link Coder}.
    * It is explicitly <i>not</i> required or expected to be serializable via Java serialization.
@@ -860,7 +862,7 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#lessThan()}.
+   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#lessThan(Comparable)}.
    */
   public static <T extends Comparable<T> & Serializable> SerializableMatcher<T> lessThan(
       final T target) {
@@ -873,7 +875,7 @@ class SerializableMatchers implements Serializable {
   }
 
   /**
-   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#lessThan()}.
+   * A {@link SerializableMatcher} with identical criteria to {@link Matchers#lessThan(Comparable)}.
    *
    * <p>The target value of type {@code T} will be serialized using the provided {@link Coder}.
    * It is explicitly <i>not</i> required or expected to be serializable via Java serialization.
@@ -891,7 +893,7 @@ class SerializableMatchers implements Serializable {
 
   /**
    * A {@link SerializableMatcher} with identical criteria to
-   * {@link Matchers#lessThanOrEqualTo()}.
+   * {@link Matchers#lessThanOrEqualTo(Comparable)}.
    */
   public static <T extends Comparable<T> & Serializable> SerializableMatcher<T> lessThanOrEqualTo(
       final T target) {
@@ -905,7 +907,7 @@ class SerializableMatchers implements Serializable {
 
   /**
    * A {@link SerializableMatcher} with identical criteria to
-   * {@link Matchers#lessThanOrEqualTo()}.
+   * {@link Matchers#lessThanOrEqualTo(Comparable)}.
    *
    * <p>The target value of type {@code T} will be serialized using the provided {@link Coder}.
    * It is explicitly <i>not</i> required or expected to be serializable via Java serialization.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
index 777deba..61f4888 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
@@ -261,7 +261,7 @@ public class CombineFns {
   public static class CoCombineResult implements Serializable {
 
     private enum NullValue {
-      INSTANCE;
+      INSTANCE
     }
 
     private final Map<TupleTag<?>, Object> valuesMap;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
index b504cb4..9bdfde8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
@@ -104,7 +104,7 @@ public abstract class DoFnReflector {
     /** Indicates parameters only available in {@code @ProcessElement} methods. */
     PROCESS_ELEMENT_ONLY,
     /** Indicates parameters available in all methods. */
-    EVERYWHERE;
+    EVERYWHERE
   }
 
   /**
@@ -445,7 +445,7 @@ public abstract class DoFnReflector {
           anno, fnClazz, DoFn.class);
 
       if (matches.size() == 0) {
-        if (required == true) {
+        if (required) {
           throw new IllegalStateException(String.format(
               "No method annotated with @%s found in %s",
               anno.getSimpleName(), fnClazz.getName()));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
index f44a9ae..8de1066 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
@@ -132,7 +132,7 @@ public class DoFnTester<InputT, OutputT> {
    */
   public enum CloningBehavior {
     CLONE,
-    DO_NOT_CLONE;
+    DO_NOT_CLONE
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
index 4466874..37cbec1 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
@@ -202,15 +202,14 @@ public class Filter<T> extends PTransform<PCollection<T>, PCollection<T>> {
 
   @Override
   public PCollection<T> apply(PCollection<T> input) {
-    PCollection<T> output = input.apply(ParDo.of(new OldDoFn<T, T>() {
+    return input.apply(ParDo.of(new OldDoFn<T, T>() {
       @Override
       public void processElement(ProcessContext c) {
-        if (predicate.apply(c.element()) == true) {
+        if (predicate.apply(c.element())) {
           c.output(c.element());
         }
       }
     }));
-    return output;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
index 6623c6a..8894352 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.values.TypeDescriptor;
 /**
  * A {@link SerializableFunction} which is not a <i>functional interface</i>.
  * Concrete subclasses allow us to infer type information, which in turn aids
- * {@link Coder} inference.
+ * {@link org.apache.beam.sdk.coders.Coder Coder} inference.
  */
 public abstract class SimpleFunction<InputT, OutputT>
     implements SerializableFunction<InputT, OutputT> {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
index 324ab08..4c792df 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
@@ -32,8 +32,9 @@ import javax.annotation.Nullable;
  * {@code AfterProcessingTime} triggers fire based on the current processing time. They operate in
  * the real-time domain.
  *
- * <p>The time at which to fire the timer can be adjusted via the methods in {@link TimeTrigger},
- * such as {@link TimeTrigger#plusDelayOf} or {@link TimeTrigger#alignedTo}.
+ * <p>The time at which to fire the timer can be adjusted via the methods in
+ * {@link AfterDelayFromFirstElement}, such as {@link AfterDelayFromFirstElement#plusDelayOf} or
+ * {@link AfterDelayFromFirstElement#alignedTo}.
  */
 @Experimental(Experimental.Kind.TRIGGER)
 public class AfterProcessingTime extends AfterDelayFromFirstElement {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
index 7917aec..6ec17f9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
@@ -126,7 +126,7 @@ public final class PaneInfo {
      * This element was not produced in a triggered pane and its relation to input and
      * output watermarks is unknown.
      */
-    UNKNOWN;
+    UNKNOWN
 
     // NOTE: Do not add fields or re-order them. The ordinal is used as part of
     // the encoding.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
index 03ff481..5b6f4c8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
@@ -155,7 +155,7 @@ public class Window {
      *
      * <p>This is the default behavior.
      */
-    FIRE_IF_NON_EMPTY;
+    FIRE_IF_NON_EMPTY
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java
index f03446e..4c93e56 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimeDomain.java
@@ -39,5 +39,5 @@ public enum TimeDomain {
    * {@code T} until all timers from earlier stages set for a time earlier than {@code T} have
    * fired.
    */
-  SYNCHRONIZED_PROCESSING_TIME;
+  SYNCHRONIZED_PROCESSING_TIME
 }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java
index 19b11cd..f5ae812 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java
@@ -53,7 +53,7 @@ public class WindowingStrategy<T, W extends BoundedWindow> implements Serializab
    */
   public enum AccumulationMode {
     DISCARDING_FIRED_PANES,
-    ACCUMULATING_FIRED_PANES;
+    ACCUMULATING_FIRED_PANES
   }
 
   private static final Duration DEFAULT_ALLOWED_LATENESS = Duration.ZERO;
@@ -278,8 +278,8 @@ public class WindowingStrategy<T, W extends BoundedWindow> implements Serializab
    *
    * <ul>
    *   <li>The {@link WindowFn#getOutputTime} allows adjustments such as that whereby
-   *       {@link SlidingWindows#getOutputTime} moves elements later in time to avoid holding up
-   *       progress downstream.</li>
+   *       {@link org.apache.beam.sdk.transforms.windowing.SlidingWindows#getOutputTime}
+   *       moves elements later in time to avoid holding up progress downstream.</li>
    *   <li>Then, when multiple elements are buffered for output, the output timestamp of the
    *       result is calculated using {@link OutputTimeFn#combine}.</li>
    *   <li>In the case of a merging {@link WindowFn}, the output timestamp when windows merge

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ElementByteSizeObserver.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ElementByteSizeObserver.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ElementByteSizeObserver.java
index 53dd4b9..3e7011b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ElementByteSizeObserver.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ElementByteSizeObserver.java
@@ -49,7 +49,7 @@ public class ElementByteSizeObserver implements Observer {
     if (counter != null) {
       counter.addValue(elementByteSize);
     }
-  };
+  }
 
   /**
    * Sets byte counting for the current element as lazy. That is, the

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java
index 3850f04..3cc34a6 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java
@@ -122,8 +122,8 @@ public class CopyOnAccessInMemoryStateInternals<K> implements StateInternals<K>
 
   /**
    * A {@link StateTable} that, when a value is retrieved with
-   * {@link StateTable#get(StateNamespace, StateTag)}, first attempts to obtain a copy of existing
-   * {@link State} from an underlying {@link StateTable}.
+   * {@link StateTable#get(StateNamespace, StateTag, StateContext)}, first attempts to obtain a
+   * copy of existing {@link State} from an underlying {@link StateTable}.
    */
   private static class CopyOnAccessInMemoryStateTable<K> extends StateTable<K> {
     private final K key;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java
index 54355c7..05c3b77 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java
@@ -25,8 +25,8 @@ import java.io.Serializable;
 /**
  * A factory for providing {@link StateInternals} for a particular key.
  *
- * <p>Because it will generally be embedded in a {@link DoFn}, albeit at execution time,
- * it is marked {@link Serializable}.
+ * <p>Because it will generally be embedded in a {@link org.apache.beam.sdk.transforms.DoFn DoFn},
+ * albeit at execution time, it is marked {@link Serializable}.
  */
 @Experimental(Kind.STATE)
 public interface StateInternalsFactory<K> {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java
index b8de27f..bfee976 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java
@@ -36,7 +36,7 @@ public class StateNamespaces {
   private enum Namespace {
     GLOBAL,
     WINDOW,
-    WINDOW_AND_TRIGGER;
+    WINDOW_AND_TRIGGER
   }
 
   public static StateNamespace global() {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java
index 1e23ae2..e50ad8d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java
@@ -304,7 +304,7 @@ public class StateTags {
     }
 
     @Override
-    public ValueState<T> bind(StateBinder<? extends Object> visitor) {
+    public ValueState<T> bind(StateBinder<?> visitor) {
       return visitor.bindValue(this, coder);
     }
 
@@ -505,7 +505,7 @@ public class StateTags {
     }
 
     @Override
-    public BagState<T> bind(StateBinder<? extends Object> visitor) {
+    public BagState<T> bind(StateBinder<?> visitor) {
       return visitor.bindBag(this, elemCoder);
     }
 
@@ -551,7 +551,7 @@ public class StateTags {
     }
 
     @Override
-    public WatermarkHoldState<W> bind(StateBinder<? extends Object> visitor) {
+    public WatermarkHoldState<W> bind(StateBinder<?> visitor) {
       return visitor.bindWatermark(this, outputTimeFn);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java
index 0cf4f49..01acca7 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java
@@ -128,7 +128,7 @@ public class PCollectionList<T> implements PInput, POutput {
    * this {@link PCollectionList} plus the given {@link PCollection PCollections} appended to the
    * end, in order.
    *
-   * <p>All the {@link PCollections} in the resulting {@link PCollectionList} must be
+   * <p>All the {@link PCollection PCollections} in the resulting {@link PCollectionList} must be
    * part of the same {@link Pipeline}.
    */
   public PCollectionList<T> and(Iterable<PCollection<T>> pcs) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java
index 01e4b36..299d55d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java
@@ -66,7 +66,7 @@ public interface POutput {
    * output to make it ready for being used as an input and for running.
    *
    * <p>This includes ensuring that all {@link PCollection PCollections}
-   * have {@link Coder Coders} specified or defaulted.
+   * have {@link org.apache.beam.sdk.coders.Coder Coders} specified or defaulted.
    *
    * <p>Automatically invoked whenever this {@link POutput} is used
    * as a {@link PInput} to another {@link PTransform}, or if never

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
index ea06479..b281a43 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
@@ -39,7 +39,7 @@ import java.util.Random;
  * Its generic type parameter allows tracking
  * the static type of things stored in tuples.
  *
- * <p>To aid in assigning default {@link Coder Coders} for results of
+ * <p>To aid in assigning default {@link org.apache.beam.sdk.coders.Coder Coders} for results of
  * side outputs of {@link ParDo}, an output
  * {@link TupleTag} should be instantiated with an extra {@code {}} so
  * it is an instance of an anonymous subclass without generic type

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 9fccbf9..8741c9c 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -926,7 +926,6 @@ public class BigQueryIO {
       if (parseStatus(job) != Status.SUCCEEDED) {
         throw new IOException("Query job failed: " + jobId);
       }
-      return;
     }
 
     private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index 342c4fc..eeb02e6 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -474,9 +474,7 @@ public class JmsIO {
         if (producer == null) {
           this.connection = connectionFactory.createConnection();
           this.connection.start();
-          /**
-           * false means we don't use JMS transaction.
-           */
+          // false means we don't use JMS transaction.
           this.session = this.connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
           Destination destination;
           if (queue != null) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/032e1fa6/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index eb649a6..2271216 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -1059,7 +1059,7 @@ public class KafkaIO {
         try {
           offsetConsumer.seekToEnd(p.topicPartition);
           long offset = offsetConsumer.position(p.topicPartition);
-          p.setLatestOffset(offset);;
+          p.setLatestOffset(offset);
         } catch (Exception e) {
           LOG.warn("{}: exception while fetching latest offsets. ignored.",  this, e);
           p.setLatestOffset(-1L); // reset



[22/51] [abbrv] incubator-beam git commit: [BEAM-383] Modified BigQueryIO to write based on number of files and file sizes

Posted by ke...@apache.org.
[BEAM-383] Modified BigQueryIO to write based on number of files and file sizes


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/8db6114e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/8db6114e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/8db6114e

Branch: refs/heads/python-sdk
Commit: 8db6114e2087cafc4369f6ec85b04f978dfb1984
Parents: 595d2d4
Author: Ian Zhou <ia...@google.com>
Authored: Wed Jul 20 15:56:21 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 23:40:27 2016 -0700

----------------------------------------------------------------------
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 585 ++++++++++++++-----
 .../sdk/io/gcp/bigquery/BigQueryServices.java   |   7 +
 .../io/gcp/bigquery/BigQueryServicesImpl.java   |  51 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     | 213 ++++++-
 4 files changed, 693 insertions(+), 163 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8db6114e/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 8741c9c..2ba7562 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -21,6 +21,7 @@ import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 
+import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.AtomicCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.Coder.Context;
@@ -33,9 +34,6 @@ import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.io.AvroSource;
 import org.apache.beam.sdk.io.BoundedSource;
-import org.apache.beam.sdk.io.FileBasedSink;
-import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
-import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.JobService;
 import org.apache.beam.sdk.options.BigQueryOptions;
@@ -44,6 +42,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
@@ -52,7 +51,13 @@ import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.DefaultTrigger;
+import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
+import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff;
+import org.apache.beam.sdk.util.FileIOChannelFactory;
+import org.apache.beam.sdk.util.GcsIOChannelFactory;
+import org.apache.beam.sdk.util.GcsUtil;
 import org.apache.beam.sdk.util.GcsUtil.GcsUtilFactory;
 import org.apache.beam.sdk.util.IOChannelFactory;
 import org.apache.beam.sdk.util.IOChannelUtils;
@@ -80,6 +85,7 @@ import com.google.api.services.bigquery.model.Job;
 import com.google.api.services.bigquery.model.JobConfigurationExtract;
 import com.google.api.services.bigquery.model.JobConfigurationLoad;
 import com.google.api.services.bigquery.model.JobConfigurationQuery;
+import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
 import com.google.api.services.bigquery.model.JobReference;
 import com.google.api.services.bigquery.model.JobStatistics;
 import com.google.api.services.bigquery.model.JobStatus;
@@ -93,6 +99,7 @@ import com.google.common.base.MoreObjects;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
+import com.google.common.io.CountingOutputStream;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -110,6 +117,8 @@ import java.io.Serializable;
 import java.nio.channels.Channels;
 import java.nio.channels.WritableByteChannel;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -196,7 +205,8 @@ import javax.annotation.Nullable;
  * <p>See {@link BigQueryIO.Write} for details on how to specify if a write should
  * append to an existing table, replace the table, or verify that the table is
  * empty. Note that the dataset being written to must already exist. Unbounded PCollections can only
- * be written using {@link WriteDisposition#WRITE_EMPTY} or {@link WriteDisposition#WRITE_APPEND}.
+ * be written using {@link Write.WriteDisposition#WRITE_EMPTY} or
+ * {@link Write.WriteDisposition#WRITE_APPEND}.
  *
  * <h3>Sharding BigQuery output tables</h3>
  * <p>A common use case is to dynamically generate BigQuery table names based on
@@ -1412,6 +1422,19 @@ public class BigQueryIO {
      * {@link PCollection} of {@link TableRow TableRows} to a BigQuery table.
      */
     public static class Bound extends PTransform<PCollection<TableRow>, PDone> {
+      // Maximum number of files in a single partition.
+      static final int MAX_NUM_FILES = 10000;
+
+      // Maximum number of bytes in a single partition.
+      static final long MAX_SIZE_BYTES = 3 * (1L << 40);
+
+      // The maximum number of retry jobs.
+      static final int MAX_RETRY_JOBS = 3;
+
+      // The maximum number of retries to poll the status of a job.
+      // It is set to {@code Integer.MAX_VALUE} to block until the BigQuery job finishes.
+      static final int LOAD_JOB_POLL_MAX_RETRIES = Integer.MAX_VALUE;
+
       @Nullable final String jsonTableRef;
 
       @Nullable final SerializableFunction<BoundedWindow, TableReference> tableRefFunction;
@@ -1666,7 +1689,8 @@ public class BigQueryIO {
 
       @Override
       public PDone apply(PCollection<TableRow> input) {
-        BigQueryOptions options = input.getPipeline().getOptions().as(BigQueryOptions.class);
+        Pipeline p = input.getPipeline();
+        BigQueryOptions options = p.getOptions().as(BigQueryOptions.class);
         BigQueryServices bqServices = getBigQueryServices();
 
         // In a streaming job, or when a tablespec function is defined, we use StreamWithDeDup
@@ -1680,13 +1704,13 @@ public class BigQueryIO {
         if (Strings.isNullOrEmpty(table.getProjectId())) {
           table.setProjectId(options.getProject());
         }
-        String jobIdToken = randomUUIDString();
+        String jobIdToken = "beam_job_" + randomUUIDString();
         String tempLocation = options.getTempLocation();
         String tempFilePrefix;
         try {
           IOChannelFactory factory = IOChannelUtils.getFactory(tempLocation);
           tempFilePrefix = factory.resolve(
-                  factory.resolve(tempLocation, "BigQuerySinkTemp"),
+                  factory.resolve(tempLocation, "BigQueryWriteTemp"),
                   jobIdToken);
         } catch (IOException e) {
           throw new RuntimeException(
@@ -1694,16 +1718,120 @@ public class BigQueryIO {
               e);
         }
 
-        return input.apply("Write", org.apache.beam.sdk.io.Write.to(
-            new BigQuerySink(
+        PCollection<String> singleton = p.apply("Create", Create.of(tempFilePrefix));
+
+        PCollection<TableRow> inputInGlobalWindow =
+            input.apply(
+                Window.<TableRow>into(new GlobalWindows())
+                    .triggering(DefaultTrigger.of())
+                    .discardingFiredPanes());
+
+        PCollection<KV<String, Long>> results = inputInGlobalWindow
+            .apply("WriteBundles",
+                ParDo.of(new WriteBundles(tempFilePrefix)));
+
+        TupleTag<KV<Long, List<String>>> multiPartitionsTag =
+            new TupleTag<KV<Long, List<String>>>("multiPartitionsTag") {};
+        TupleTag<KV<Long, List<String>>> singlePartitionTag =
+            new TupleTag<KV<Long, List<String>>>("singlePartitionTag") {};
+
+        PCollectionView<Iterable<KV<String, Long>>> resultsView = results
+            .apply("ResultsView", View.<KV<String, Long>>asIterable());
+        PCollectionTuple partitions = singleton.apply(ParDo
+            .of(new WritePartition(
+                resultsView,
+                multiPartitionsTag,
+                singlePartitionTag))
+            .withSideInputs(resultsView)
+            .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag)));
+
+        // Write multiple partitions to separate temporary tables
+        PCollection<String> tempTables = partitions.get(multiPartitionsTag)
+            .apply("MultiPartitionsGroupByKey", GroupByKey.<Long, List<String>>create())
+            .apply("MultiPartitionsWriteTables", ParDo.of(new WriteTables(
+                false,
+                bqServices,
                 jobIdToken,
-                table,
+                tempFilePrefix,
+                toJsonString(table),
                 jsonSchema,
-                getWriteDisposition(),
-                getCreateDisposition(),
+                WriteDisposition.WRITE_EMPTY,
+                CreateDisposition.CREATE_IF_NEEDED)));
+
+        PCollectionView<Iterable<String>> tempTablesView = tempTables
+            .apply("TempTablesView", View.<String>asIterable());
+        singleton.apply(ParDo
+            .of(new WriteRename(
+                bqServices,
+                jobIdToken,
+                toJsonString(table),
+                writeDisposition,
+                createDisposition,
+                tempTablesView))
+            .withSideInputs(tempTablesView));
+
+        // Write single partition to final table
+        partitions.get(singlePartitionTag)
+            .apply("SinglePartitionGroupByKey", GroupByKey.<Long, List<String>>create())
+            .apply("SinglePartitionWriteTables", ParDo.of(new WriteTables(
+                true,
+                bqServices,
+                jobIdToken,
                 tempFilePrefix,
-                input.getCoder(),
-                bqServices)));
+                toJsonString(table),
+                jsonSchema,
+                writeDisposition,
+                createDisposition)));
+
+        return PDone.in(input.getPipeline());
+      }
+
+      private class WriteBundles extends OldDoFn<TableRow, KV<String, Long>> {
+        private TableRowWriter writer = null;
+        private final String tempFilePrefix;
+
+        WriteBundles(String tempFilePrefix) {
+          this.tempFilePrefix = tempFilePrefix;
+        }
+
+        @Override
+        public void processElement(ProcessContext c) throws Exception {
+          if (writer == null) {
+            writer = new TableRowWriter(tempFilePrefix);
+            writer.open(UUID.randomUUID().toString());
+            LOG.debug("Done opening writer {}", writer);
+          }
+          try {
+            writer.write(c.element());
+          } catch (Exception e) {
+            // Discard write result and close the writer.
+            try {
+              writer.close();
+              // The writer does not need to be reset, as this OldDoFn cannot be reused.
+            } catch (Exception closeException) {
+              // Do not mask the exception that caused the write to fail.
+              e.addSuppressed(closeException);
+            }
+            throw e;
+          }
+        }
+
+        @Override
+        public void finishBundle(Context c) throws Exception {
+          if (writer != null) {
+            c.output(writer.close());
+            writer = null;
+          }
+        }
+
+        @Override
+        public void populateDisplayData(DisplayData.Builder builder) {
+          super.populateDisplayData(builder);
+
+          builder
+              .addIfNotNull(DisplayData.item("tempFilePrefix", tempFilePrefix)
+                  .withLabel("Temporary File Prefix"));
+        }
       }
 
       @Override
@@ -1784,192 +1912,361 @@ public class BigQueryIO {
       }
     }
 
-    /** Disallow construction of utility class. */
-    private Write() {}
-  }
-
-  /**
-   * {@link BigQuerySink} is implemented as a {@link FileBasedSink}.
-   *
-   * <p>It uses BigQuery load job to import files into BigQuery.
-   */
-  static class BigQuerySink extends FileBasedSink<TableRow> {
-    private final String jobIdToken;
-    @Nullable private final String jsonTable;
-    @Nullable private final String jsonSchema;
-    private final WriteDisposition writeDisposition;
-    private final CreateDisposition createDisposition;
-    private final Coder<TableRow> coder;
-    private final BigQueryServices bqServices;
-
-    public BigQuerySink(
-        String jobIdToken,
-        @Nullable TableReference table,
-        @Nullable String jsonSchema,
-        WriteDisposition writeDisposition,
-        CreateDisposition createDisposition,
-        String tempFile,
-        Coder<TableRow> coder,
-        BigQueryServices bqServices) {
-      super(tempFile, ".json");
-      this.jobIdToken = checkNotNull(jobIdToken, "jobIdToken");
-      if (table == null) {
-        this.jsonTable = null;
-      } else {
-        checkArgument(!Strings.isNullOrEmpty(table.getProjectId()),
-            "Table %s should have a project specified", table);
-        this.jsonTable = toJsonString(table);
-      }
-      this.jsonSchema = jsonSchema;
-      this.writeDisposition = checkNotNull(writeDisposition, "writeDisposition");
-      this.createDisposition = checkNotNull(createDisposition, "createDisposition");
-      this.coder = checkNotNull(coder, "coder");
-      this.bqServices = checkNotNull(bqServices, "bqServices");
-     }
-
-    @Override
-    public FileBasedSink.FileBasedWriteOperation<TableRow> createWriteOperation(
-        PipelineOptions options) {
-      return new BigQueryWriteOperation(this);
-    }
+    static class TableRowWriter {
+      private static final Coder<TableRow> CODER = TableRowJsonCoder.of();
+      private static final byte[] NEWLINE = "\n".getBytes(StandardCharsets.UTF_8);
+      private final String tempFilePrefix;
+      private String id;
+      private String fileName;
+      private WritableByteChannel channel;
+      protected String mimeType = MimeTypes.TEXT;
+      private CountingOutputStream out;
+
+      TableRowWriter(String basename) {
+        this.tempFilePrefix = basename;
+      }
+
+      public final void open(String uId) throws Exception {
+        id = uId;
+        fileName = tempFilePrefix + id;
+        LOG.debug("Opening {}.", fileName);
+        channel = IOChannelUtils.create(fileName, mimeType);
+        try {
+          out = new CountingOutputStream(Channels.newOutputStream(channel));
+          LOG.debug("Writing header to {}.", fileName);
+        } catch (Exception e) {
+          try {
+            LOG.error("Writing header to {} failed, closing channel.", fileName);
+            channel.close();
+          } catch (IOException closeException) {
+            LOG.error("Closing channel for {} failed", fileName);
+          }
+          throw e;
+        }
+        LOG.debug("Starting write of bundle {} to {}.", this.id, fileName);
+      }
 
-    @Override
-    public void populateDisplayData(DisplayData.Builder builder) {
-      super.populateDisplayData(builder);
+      public void write(TableRow value) throws Exception {
+        CODER.encode(value, out, Context.OUTER);
+        out.write(NEWLINE);
+      }
 
-      builder
-          .addIfNotNull(DisplayData.item("schema", jsonSchema)
-            .withLabel("Table Schema"))
-          .addIfNotNull(DisplayData.item("tableSpec", jsonTable)
-            .withLabel("Table Specification"));
+      public final KV<String, Long> close() throws IOException {
+        channel.close();
+        return KV.of(fileName, out.getCount());
+      }
     }
 
-    private static class BigQueryWriteOperation extends FileBasedWriteOperation<TableRow> {
-      // The maximum number of retry load jobs.
-      private static final int MAX_RETRY_LOAD_JOBS = 3;
+    /**
+     * Partitions temporary files based on number of files and file sizes.
+     */
+    static class WritePartition extends OldDoFn<String, KV<Long, List<String>>> {
+      private final PCollectionView<Iterable<KV<String, Long>>> resultsView;
+      private TupleTag<KV<Long, List<String>>> multiPartitionsTag;
+      private TupleTag<KV<Long, List<String>>> singlePartitionTag;
 
-      // The maximum number of retries to poll the status of a load job.
-      // It sets to {@code Integer.MAX_VALUE} to block until the BigQuery job finishes.
-      private static final int LOAD_JOB_POLL_MAX_RETRIES = Integer.MAX_VALUE;
+      public WritePartition(
+          PCollectionView<Iterable<KV<String, Long>>> resultsView,
+          TupleTag<KV<Long, List<String>>> multiPartitionsTag,
+          TupleTag<KV<Long, List<String>>> singlePartitionTag) {
+        this.resultsView = resultsView;
+        this.multiPartitionsTag = multiPartitionsTag;
+        this.singlePartitionTag = singlePartitionTag;
+      }
 
-      private final BigQuerySink bigQuerySink;
+      @Override
+      public void processElement(ProcessContext c) throws Exception {
+        List<KV<String, Long>> results = Lists.newArrayList(c.sideInput(resultsView));
+        if (results.isEmpty()) {
+          TableRowWriter writer = new TableRowWriter(c.element());
+          writer.open(UUID.randomUUID().toString());
+          results.add(writer.close());
+        }
 
-      private BigQueryWriteOperation(BigQuerySink sink) {
-        super(checkNotNull(sink, "sink"));
-        this.bigQuerySink = sink;
+        long partitionId = 0;
+        int currNumFiles = 0;
+        long currSizeBytes = 0;
+        List<String> currResults = Lists.newArrayList();
+        for (int i = 0; i < results.size(); ++i) {
+          KV<String, Long> fileResult = results.get(i);
+          if (currNumFiles + 1 > Bound.MAX_NUM_FILES
+              || currSizeBytes + fileResult.getValue() > Bound.MAX_SIZE_BYTES) {
+            c.sideOutput(multiPartitionsTag, KV.of(++partitionId, currResults));
+            currResults = Lists.newArrayList();
+            currNumFiles = 0;
+            currSizeBytes = 0;
+          }
+          ++currNumFiles;
+          currSizeBytes += fileResult.getValue();
+          currResults.add(fileResult.getKey());
+        }
+        if (partitionId == 0) {
+          c.sideOutput(singlePartitionTag, KV.of(++partitionId, currResults));
+        } else {
+          c.sideOutput(multiPartitionsTag, KV.of(++partitionId, currResults));
+        }
       }
 
       @Override
-      public FileBasedWriter<TableRow> createWriter(PipelineOptions options) throws Exception {
-        return new TableRowWriter(this, bigQuerySink.coder);
+      public void populateDisplayData(DisplayData.Builder builder) {
+        super.populateDisplayData(builder);
+      }
+    }
+
+    /**
+     * Writes partitions to BigQuery tables.
+     */
+    static class WriteTables extends OldDoFn<KV<Long, Iterable<List<String>>>, String> {
+      private final boolean singlePartition;
+      private final BigQueryServices bqServices;
+      private final String jobIdToken;
+      private final String tempFilePrefix;
+      private final String jsonTableRef;
+      private final String jsonSchema;
+      private final WriteDisposition writeDisposition;
+      private final CreateDisposition createDisposition;
+
+      public WriteTables(
+          boolean singlePartition,
+          BigQueryServices bqServices,
+          String jobIdToken,
+          String tempFilePrefix,
+          String jsonTableRef,
+          String jsonSchema,
+          WriteDisposition writeDisposition,
+          CreateDisposition createDisposition) {
+        this.singlePartition = singlePartition;
+        this.bqServices = bqServices;
+        this.jobIdToken = jobIdToken;
+        this.tempFilePrefix = tempFilePrefix;
+        this.jsonTableRef = jsonTableRef;
+        this.jsonSchema = jsonSchema;
+        this.writeDisposition = writeDisposition;
+        this.createDisposition = createDisposition;
       }
 
       @Override
-      public void finalize(Iterable<FileResult> writerResults, PipelineOptions options)
-          throws IOException, InterruptedException {
-        try {
-          BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
-          List<String> tempFiles = Lists.newArrayList();
-          for (FileResult result : writerResults) {
-            tempFiles.add(result.getFilename());
-          }
-          if (!tempFiles.isEmpty()) {
-              load(
-                  bigQuerySink.bqServices.getJobService(bqOptions),
-                  bigQuerySink.jobIdToken,
-                  fromJsonString(bigQuerySink.jsonTable, TableReference.class),
-                  tempFiles,
-                  fromJsonString(bigQuerySink.jsonSchema, TableSchema.class),
-                  bigQuerySink.writeDisposition,
-                  bigQuerySink.createDisposition);
-          }
-        } finally {
-          removeTemporaryFiles(options);
+      public void processElement(ProcessContext c) throws Exception {
+        List<String> partition = Lists.newArrayList(c.element().getValue()).get(0);
+        String jobIdPrefix = String.format(jobIdToken + "_%05d", c.element().getKey());
+        TableReference ref = fromJsonString(jsonTableRef, TableReference.class);
+        if (!singlePartition) {
+          ref.setTableId(jobIdPrefix);
         }
+
+        load(
+            bqServices.getJobService(c.getPipelineOptions().as(BigQueryOptions.class)),
+            jobIdPrefix,
+            ref,
+            fromJsonString(jsonSchema, TableSchema.class),
+            partition,
+            writeDisposition,
+            createDisposition);
+        c.output(toJsonString(ref));
+
+        removeTemporaryFiles(c.getPipelineOptions(), partition);
       }
 
-      /**
-       * Import files into BigQuery with load jobs.
-       *
-       * <p>Returns if files are successfully loaded into BigQuery.
-       * Throws a RuntimeException if:
-       *     1. The status of one load job is UNKNOWN. This is to avoid duplicating data.
-       *     2. It exceeds {@code MAX_RETRY_LOAD_JOBS}.
-       *
-       * <p>If a load job failed, it will try another load job with a different job id.
-       */
       private void load(
           JobService jobService,
           String jobIdPrefix,
           TableReference ref,
-          List<String> gcsUris,
           @Nullable TableSchema schema,
+          List<String> gcsUris,
           WriteDisposition writeDisposition,
           CreateDisposition createDisposition) throws InterruptedException, IOException {
         JobConfigurationLoad loadConfig = new JobConfigurationLoad()
-            .setSourceUris(gcsUris)
             .setDestinationTable(ref)
             .setSchema(schema)
+            .setSourceUris(gcsUris)
             .setWriteDisposition(writeDisposition.name())
             .setCreateDisposition(createDisposition.name())
             .setSourceFormat("NEWLINE_DELIMITED_JSON");
 
-        boolean retrying = false;
         String projectId = ref.getProjectId();
-        for (int i = 0; i < MAX_RETRY_LOAD_JOBS; ++i) {
+        for (int i = 0; i < Bound.MAX_RETRY_JOBS; ++i) {
           String jobId = jobIdPrefix + "-" + i;
-          if (retrying) {
-            LOG.info("Previous load jobs failed, retrying.");
-          }
-          LOG.info("Starting BigQuery load job: {}", jobId);
+          LOG.info("Starting BigQuery load job {}: try {}/{}", jobId, i, Bound.MAX_RETRY_JOBS);
           JobReference jobRef = new JobReference()
               .setProjectId(projectId)
               .setJobId(jobId);
           jobService.startLoadJob(jobRef, loadConfig);
           Status jobStatus =
-              parseStatus(jobService.pollJob(jobRef, LOAD_JOB_POLL_MAX_RETRIES));
+              parseStatus(jobService.pollJob(jobRef, Bound.LOAD_JOB_POLL_MAX_RETRIES));
           switch (jobStatus) {
             case SUCCEEDED:
               return;
             case UNKNOWN:
-              throw new RuntimeException("Failed to poll the load job status.");
+              throw new RuntimeException("Failed to poll the load job status of job " + jobId);
             case FAILED:
               LOG.info("BigQuery load job failed: {}", jobId);
-              retrying = true;
               continue;
             default:
-              throw new IllegalStateException("Unexpected job status: " + jobStatus);
+              throw new IllegalStateException(String.format("Unexpected job status: %s of job %s",
+                  jobStatus, jobId));
           }
         }
-        throw new RuntimeException(
-            "Failed to create the load job, reached max retries: " + MAX_RETRY_LOAD_JOBS);
+        throw new RuntimeException(String.format("Failed to create the load job %s, reached max "
+            + "retries: %d", jobIdPrefix, Bound.MAX_RETRY_JOBS));
+      }
+
+      private void removeTemporaryFiles(PipelineOptions options, Collection<String> matches)
+          throws IOException {
+        String pattern = tempFilePrefix + "*";
+        LOG.debug("Finding temporary files matching {}.", pattern);
+        IOChannelFactory factory = IOChannelUtils.getFactory(pattern);
+        if (factory instanceof GcsIOChannelFactory) {
+          GcsUtil gcsUtil = new GcsUtil.GcsUtilFactory().create(options);
+          gcsUtil.remove(matches);
+        } else if (factory instanceof FileIOChannelFactory) {
+          for (String filename : matches) {
+            LOG.debug("Removing file {}", filename);
+            boolean exists = Files.deleteIfExists(Paths.get(filename));
+            if (!exists) {
+              LOG.debug("{} does not exist.", filename);
+            }
+          }
+        } else {
+          throw new IOException("Unrecognized file system.");
+        }
       }
-    }
 
-    private static class TableRowWriter extends FileBasedWriter<TableRow> {
-      private static final byte[] NEWLINE = "\n".getBytes(StandardCharsets.UTF_8);
-      private final Coder<TableRow> coder;
-      private OutputStream out;
+      @Override
+      public void populateDisplayData(DisplayData.Builder builder) {
+        super.populateDisplayData(builder);
 
-      public TableRowWriter(
-          FileBasedWriteOperation<TableRow> writeOperation, Coder<TableRow> coder) {
-        super(writeOperation);
-        this.mimeType = MimeTypes.TEXT;
-        this.coder = coder;
+        builder
+            .addIfNotNull(DisplayData.item("jobIdToken", jobIdToken)
+                .withLabel("Job ID Token"))
+            .addIfNotNull(DisplayData.item("tempFilePrefix", tempFilePrefix)
+                .withLabel("Temporary File Prefix"))
+            .addIfNotNull(DisplayData.item("jsonTableRef", jsonTableRef)
+                .withLabel("Table Reference"))
+            .addIfNotNull(DisplayData.item("jsonSchema", jsonSchema)
+                .withLabel("Table Schema"));
+      }
+    }
+
+    /**
+     * Copies temporary tables to destination table.
+     */
+    static class WriteRename extends OldDoFn<String, Void> {
+      private final BigQueryServices bqServices;
+      private final String jobIdToken;
+      private final String jsonTableRef;
+      private final WriteDisposition writeDisposition;
+      private final CreateDisposition createDisposition;
+      private final PCollectionView<Iterable<String>> tempTablesView;
+
+      public WriteRename(
+          BigQueryServices bqServices,
+          String jobIdToken,
+          String jsonTableRef,
+          WriteDisposition writeDisposition,
+          CreateDisposition createDisposition,
+          PCollectionView<Iterable<String>> tempTablesView) {
+        this.bqServices = bqServices;
+        this.jobIdToken = jobIdToken;
+        this.jsonTableRef = jsonTableRef;
+        this.writeDisposition = writeDisposition;
+        this.createDisposition = createDisposition;
+        this.tempTablesView = tempTablesView;
       }
 
       @Override
-      protected void prepareWrite(WritableByteChannel channel) throws Exception {
-        out = Channels.newOutputStream(channel);
+      public void processElement(ProcessContext c) throws Exception {
+        List<String> tempTablesJson = Lists.newArrayList(c.sideInput(tempTablesView));
+
+        // Do not copy if no temp tables are provided
+        if (tempTablesJson.size() == 0) {
+          return;
+        }
+
+        List<TableReference> tempTables = Lists.newArrayList();
+        for (String table : tempTablesJson) {
+          tempTables.add(fromJsonString(table, TableReference.class));
+        }
+        copy(
+            bqServices.getJobService(c.getPipelineOptions().as(BigQueryOptions.class)),
+            jobIdToken,
+            fromJsonString(jsonTableRef, TableReference.class),
+            tempTables,
+            writeDisposition,
+            createDisposition);
+
+        DatasetService tableService =
+            bqServices.getDatasetService(c.getPipelineOptions().as(BigQueryOptions.class));
+        removeTemporaryTables(tableService, tempTables);
+      }
+
+      private void copy(
+          JobService jobService,
+          String jobIdPrefix,
+          TableReference ref,
+          List<TableReference> tempTables,
+          WriteDisposition writeDisposition,
+          CreateDisposition createDisposition) throws InterruptedException, IOException {
+        JobConfigurationTableCopy copyConfig = new JobConfigurationTableCopy()
+            .setSourceTables(tempTables)
+            .setDestinationTable(ref)
+            .setWriteDisposition(writeDisposition.name())
+            .setCreateDisposition(createDisposition.name());
+
+        String projectId = ref.getProjectId();
+        for (int i = 0; i < Bound.MAX_RETRY_JOBS; ++i) {
+          String jobId = jobIdPrefix + "-" + i;
+          LOG.info("Starting BigQuery copy job {}: try {}/{}", jobId, i, Bound.MAX_RETRY_JOBS);
+          JobReference jobRef = new JobReference()
+              .setProjectId(projectId)
+              .setJobId(jobId);
+          jobService.startCopyJob(jobRef, copyConfig);
+          Status jobStatus =
+              parseStatus(jobService.pollJob(jobRef, Bound.LOAD_JOB_POLL_MAX_RETRIES));
+          switch (jobStatus) {
+            case SUCCEEDED:
+              return;
+            case UNKNOWN:
+              throw new RuntimeException("Failed to poll the copy job status of job " + jobId);
+            case FAILED:
+              LOG.info("BigQuery copy job failed: {}", jobId);
+              continue;
+            default:
+              throw new IllegalStateException(String.format("Unexpected job status: %s of job %s",
+                  jobStatus, jobId));
+          }
+        }
+        throw new RuntimeException(String.format("Failed to create the copy job %s, reached max "
+            + "retries: %d", jobIdPrefix, Bound.MAX_RETRY_JOBS));
+      }
+
+      private void removeTemporaryTables(DatasetService tableService,
+          List<TableReference> tempTables) throws Exception {
+        for (TableReference tableRef : tempTables) {
+          tableService.deleteTable(
+              tableRef.getProjectId(),
+              tableRef.getDatasetId(),
+              tableRef.getTableId());
+        }
       }
 
       @Override
-      public void write(TableRow value) throws Exception {
-        // Use Context.OUTER to encode and NEWLINE as the delimeter.
-        coder.encode(value, out, Context.OUTER);
-        out.write(NEWLINE);
+      public void populateDisplayData(DisplayData.Builder builder) {
+        super.populateDisplayData(builder);
+
+        builder
+            .addIfNotNull(DisplayData.item("jobIdToken", jobIdToken)
+                .withLabel("Job ID Token"))
+            .addIfNotNull(DisplayData.item("jsonTableRef", jsonTableRef)
+                .withLabel("Table Reference"))
+            .add(DisplayData.item("writeDisposition", writeDisposition.toString())
+                .withLabel("Write Disposition"))
+            .add(DisplayData.item("createDisposition", createDisposition.toString())
+                .withLabel("Create Disposition"));
       }
     }
+
+    /** Disallow construction of utility class. */
+    private Write() {}
   }
 
   private static void verifyDatasetPresence(DatasetService datasetService, TableReference table) {
@@ -2093,8 +2390,8 @@ public class BigQueryIO {
             TableSchema tableSchema = JSON_FACTORY.fromString(jsonTableSchema, TableSchema.class);
             Bigquery client = Transport.newBigQueryClient(options).build();
             BigQueryTableInserter inserter = new BigQueryTableInserter(client, options);
-            inserter.getOrCreateTable(tableReference, WriteDisposition.WRITE_APPEND,
-                CreateDisposition.CREATE_IF_NEEDED, tableSchema);
+            inserter.getOrCreateTable(tableReference, Write.WriteDisposition.WRITE_APPEND,
+                Write.CreateDisposition.CREATE_IF_NEEDED, tableSchema);
             createdTables.add(tableSpec);
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8db6114e/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java
index 29a335d..0af6df8 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java
@@ -24,6 +24,7 @@ import com.google.api.services.bigquery.model.Job;
 import com.google.api.services.bigquery.model.JobConfigurationExtract;
 import com.google.api.services.bigquery.model.JobConfigurationLoad;
 import com.google.api.services.bigquery.model.JobConfigurationQuery;
+import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
 import com.google.api.services.bigquery.model.JobReference;
 import com.google.api.services.bigquery.model.JobStatistics;
 import com.google.api.services.bigquery.model.Table;
@@ -83,6 +84,12 @@ interface BigQueryServices extends Serializable {
         throws IOException, InterruptedException;
 
     /**
+     * Start a BigQuery copy job.
+     */
+    void startCopyJob(JobReference jobRef, JobConfigurationTableCopy copyConfig)
+        throws IOException, InterruptedException;
+
+    /**
      * Waits for the job is Done, and returns the job.
      *
      * <p>Returns null if the {@code maxAttempts} retries reached.

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8db6114e/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
index ef17e0f..bd1097f 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
@@ -39,6 +39,7 @@ import com.google.api.services.bigquery.model.JobConfiguration;
 import com.google.api.services.bigquery.model.JobConfigurationExtract;
 import com.google.api.services.bigquery.model.JobConfigurationLoad;
 import com.google.api.services.bigquery.model.JobConfigurationQuery;
+import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
 import com.google.api.services.bigquery.model.JobReference;
 import com.google.api.services.bigquery.model.JobStatistics;
 import com.google.api.services.bigquery.model.JobStatus;
@@ -124,9 +125,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void startLoadJob(
@@ -142,9 +143,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void startExtractJob(JobReference jobRef, JobConfigurationExtract extractConfig)
@@ -160,9 +161,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void startQueryJob(JobReference jobRef, JobConfigurationQuery queryConfig)
@@ -175,6 +176,24 @@ class BigQueryServicesImpl implements BigQueryServices {
       startJob(job, errorExtractor, client);
     }
 
+    /**
+     * {@inheritDoc}
+     *
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     *
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
+     */
+    @Override
+    public void startCopyJob(JobReference jobRef, JobConfigurationTableCopy copyConfig)
+        throws IOException, InterruptedException {
+      Job job = new Job()
+          .setJobReference(jobRef)
+          .setConfiguration(
+              new JobConfiguration().setCopy(copyConfig));
+
+      startJob(job, errorExtractor, client);
+    }
+
     private static void startJob(Job job,
       ApiErrorExtractor errorExtractor,
       Bigquery client) throws IOException, InterruptedException {
@@ -320,9 +339,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public Table getTable(String projectId, String datasetId, String tableId)
@@ -341,9 +360,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void deleteTable(String projectId, String datasetId, String tableId)
@@ -377,9 +396,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public Dataset getDataset(String projectId, String datasetId)
@@ -398,9 +417,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void createDataset(
@@ -456,9 +475,9 @@ class BigQueryServicesImpl implements BigQueryServices {
     /**
      * {@inheritDoc}
      *
-     * <p> the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
+     * <p>Tries executing the RPC for at most {@code MAX_RPC_ATTEMPTS} times until it succeeds.
      *
-     * @throws IOException if it exceeds max RPC .
+     * @throws IOException if it exceeds {@code MAX_RPC_ATTEMPTS} attempts.
      */
     @Override
     public void deleteDataset(String projectId, String datasetId)

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/8db6114e/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
index 7d2df62..1ea1f94 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
@@ -26,14 +26,17 @@ import static org.hamcrest.Matchers.hasItem;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.when;
 
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.CoderException;
+import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.TableRowJsonCoder;
+import org.apache.beam.sdk.coders.VarLongCoder;
 import org.apache.beam.sdk.io.BoundedSource;
 import org.apache.beam.sdk.io.CountingSource;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.BigQueryQuerySource;
@@ -44,6 +47,9 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Status;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TransformingSource;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
+import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WritePartition;
+import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteRename;
+import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteTables;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.JobService;
 import org.apache.beam.sdk.options.BigQueryOptions;
@@ -58,16 +64,23 @@ import org.apache.beam.sdk.testing.SourceTestUtils;
 import org.apache.beam.sdk.testing.SourceTestUtils.ExpectedSplitOutcome;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFnTester;
 import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.CoderUtils;
 import org.apache.beam.sdk.util.IOChannelFactory;
 import org.apache.beam.sdk.util.IOChannelUtils;
+import org.apache.beam.sdk.util.PCollectionViews;
+import org.apache.beam.sdk.util.WindowingStrategy;
+import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
 
 import com.google.api.client.util.Data;
 import com.google.api.client.util.Strings;
@@ -76,6 +89,7 @@ import com.google.api.services.bigquery.model.Job;
 import com.google.api.services.bigquery.model.JobConfigurationExtract;
 import com.google.api.services.bigquery.model.JobConfigurationLoad;
 import com.google.api.services.bigquery.model.JobConfigurationQuery;
+import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
 import com.google.api.services.bigquery.model.JobReference;
 import com.google.api.services.bigquery.model.JobStatistics;
 import com.google.api.services.bigquery.model.JobStatistics2;
@@ -110,6 +124,9 @@ import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;
 import java.io.Serializable;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
@@ -123,6 +140,8 @@ import javax.annotation.Nullable;
 @RunWith(JUnit4.class)
 public class BigQueryIOTest implements Serializable {
 
+  @Rule public transient TemporaryFolder tmpFolder = new TemporaryFolder();
+
   // Status.UNKNOWN maps to null
   private static final Map<Status, Job> JOB_STATUS_MAP = ImmutableMap.of(
       Status.SUCCEEDED, new Job().setStatus(new JobStatus()),
@@ -275,6 +294,12 @@ public class BigQueryIOTest implements Serializable {
     }
 
     @Override
+    public void startCopyJob(JobReference jobRef, JobConfigurationTableCopy copyConfig)
+        throws IOException, InterruptedException {
+      startJob(jobRef);
+    }
+
+    @Override
     public Job pollJob(JobReference jobRef, int maxAttempts)
         throws InterruptedException {
       if (!Strings.isNullOrEmpty(executingProject)) {
@@ -565,7 +590,8 @@ public class BigQueryIOTest implements Serializable {
     FakeBigQueryServices fakeBqServices = new FakeBigQueryServices()
         .withJobService(new FakeJobService()
             .startJobReturns("done", "done", "done")
-            .pollJobReturns(Status.FAILED, Status.FAILED, Status.SUCCEEDED));
+            .pollJobReturns(Status.FAILED, Status.FAILED, Status.SUCCEEDED))
+        .withDatasetService(mockDatasetService);
 
     Pipeline p = TestPipeline.create(bqOptions);
     p.apply(Create.of(
@@ -584,7 +610,6 @@ public class BigQueryIOTest implements Serializable {
     p.run();
 
     logged.verifyInfo("Starting BigQuery load job");
-    logged.verifyInfo("Previous load jobs failed, retrying.");
     File tempDir = new File(bqOptions.getTempLocation());
     assertEquals(0, tempDir.listFiles(new FileFilter() {
       @Override
@@ -613,7 +638,7 @@ public class BigQueryIOTest implements Serializable {
         .withoutValidation());
 
     thrown.expect(RuntimeException.class);
-    thrown.expectMessage("Failed to poll the load job status.");
+    thrown.expectMessage("Failed to poll the load job status");
     p.run();
 
     File tempDir = new File(bqOptions.getTempLocation());
@@ -1228,4 +1253,186 @@ public class BigQueryIOTest implements Serializable {
 
     p.run();
   }
+
+  @Test
+  public void testWritePartitionEmptyData() throws Exception {
+    final long numFiles = 0;
+    final long fileSize = 0;
+
+    // An empty file is created for no input data. One partition is needed.
+    final long expectedNumPartitions = 1;
+    testWritePartition(numFiles, fileSize, expectedNumPartitions);
+  }
+
+  @Test
+  public void testWritePartitionSinglePartition() throws Exception {
+    final long numFiles = BigQueryIO.Write.Bound.MAX_NUM_FILES;
+    final long fileSize = 1;
+
+    // One partition is needed.
+    final long expectedNumPartitions = 1;
+    testWritePartition(numFiles, fileSize, expectedNumPartitions);
+  }
+
+  @Test
+  public void testWritePartitionManyFiles() throws Exception {
+    final long numFiles = BigQueryIO.Write.Bound.MAX_NUM_FILES * 3;
+    final long fileSize = 1;
+
+    // One partition is needed for each group of BigQueryWrite.MAX_NUM_FILES files.
+    final long expectedNumPartitions = 3;
+    testWritePartition(numFiles, fileSize, expectedNumPartitions);
+  }
+
+  @Test
+  public void testWritePartitionLargeFileSize() throws Exception {
+    final long numFiles = 10;
+    final long fileSize = BigQueryIO.Write.Bound.MAX_SIZE_BYTES / 3;
+
+    // One partition is needed for each group of three files.
+    final long expectedNumPartitions = 4;
+    testWritePartition(numFiles, fileSize, expectedNumPartitions);
+  }
+
+  private void testWritePartition(long numFiles, long fileSize, long expectedNumPartitions)
+      throws Exception {
+    final List<Long> expectedPartitionIds = Lists.newArrayList();
+    for (long i = 1; i <= expectedNumPartitions; ++i) {
+      expectedPartitionIds.add(i);
+    }
+
+    final List<KV<String, Long>> files = Lists.newArrayList();
+    final List<String> fileNames = Lists.newArrayList();
+    for (int i = 0; i < numFiles; ++i) {
+      String fileName = String.format("files%05d", i);
+      fileNames.add(fileName);
+      files.add(KV.of(fileName, fileSize));
+    }
+
+    TupleTag<KV<Long, List<String>>> multiPartitionsTag =
+        new TupleTag<KV<Long, List<String>>>("multiPartitionsTag") {};
+    TupleTag<KV<Long, List<String>>> singlePartitionTag =
+        new TupleTag<KV<Long, List<String>>>("singlePartitionTag") {};
+
+    final PCollectionView<Iterable<KV<String, Long>>> filesView = PCollectionViews.iterableView(
+        TestPipeline.create(),
+        WindowingStrategy.globalDefault(),
+        KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of()));
+
+    WritePartition writePartition =
+        new WritePartition(filesView, multiPartitionsTag, singlePartitionTag);
+
+    DoFnTester<String, KV<Long, List<String>>> tester = DoFnTester.of(writePartition);
+    tester.setSideInput(filesView, GlobalWindow.INSTANCE, files);
+    tester.processElement(tmpFolder.getRoot().getAbsolutePath());
+
+    List<KV<Long, List<String>>> partitions;
+    if (expectedNumPartitions > 1) {
+      partitions = tester.takeSideOutputElements(multiPartitionsTag);
+    } else {
+      partitions = tester.takeSideOutputElements(singlePartitionTag);
+    }
+    List<Long> partitionIds = Lists.newArrayList();
+    List<String> partitionFileNames = Lists.newArrayList();
+    for (KV<Long, List<String>> partition : partitions) {
+      partitionIds.add(partition.getKey());
+      for (String name : partition.getValue()) {
+        partitionFileNames.add(name);
+      }
+    }
+
+    assertEquals(expectedPartitionIds, partitionIds);
+    if (numFiles == 0) {
+      assertThat(partitionFileNames, Matchers.hasSize(1));
+      assertTrue(Files.exists(Paths.get(partitionFileNames.get(0))));
+      assertThat(Files.readAllBytes(Paths.get(partitionFileNames.get(0))).length,
+          Matchers.equalTo(0));
+    } else {
+      assertEquals(fileNames, partitionFileNames);
+    }
+  }
+
+  @Test
+  public void testWriteTables() throws Exception {
+    FakeBigQueryServices fakeBqServices = new FakeBigQueryServices()
+        .withJobService(new FakeJobService()
+            .startJobReturns("done", "done", "done", "done")
+            .pollJobReturns(Status.FAILED, Status.SUCCEEDED, Status.SUCCEEDED, Status.SUCCEEDED))
+        .withDatasetService(mockDatasetService);
+
+    final long numPartitions = 3;
+    final long numFilesPerPartition = 10;
+    final String jobIdToken = "jobIdToken";
+    final String tempFilePrefix = "tempFilePrefix";
+    final String jsonTable = "{}";
+    final String jsonSchema = "{}";
+    final List<String> expectedTempTables = Lists.newArrayList();
+
+    final List<KV<Long, Iterable<List<String>>>> partitions = Lists.newArrayList();
+    for (long i = 0; i < numPartitions; ++i) {
+      List<String> filesPerPartition = Lists.newArrayList();
+      for (int j = 0; j < numFilesPerPartition; ++j) {
+        filesPerPartition.add(String.format("files%05d", j));
+      }
+      partitions.add(KV.of(i, (Iterable<List<String>>) Collections.singleton(filesPerPartition)));
+      expectedTempTables.add(String.format("{\"tableId\":\"%s_%05d\"}", jobIdToken, i));
+    }
+
+    WriteTables writeTables = new WriteTables(
+        false,
+        fakeBqServices,
+        jobIdToken,
+        tempFilePrefix,
+        jsonTable,
+        jsonSchema,
+        WriteDisposition.WRITE_EMPTY,
+        CreateDisposition.CREATE_IF_NEEDED);
+
+    DoFnTester<KV<Long, Iterable<List<String>>>, String> tester = DoFnTester.of(writeTables);
+    for (KV<Long, Iterable<List<String>>> partition : partitions) {
+      tester.processElement(partition);
+    }
+
+    List<String> tempTables = tester.takeOutputElements();
+
+    logged.verifyInfo("Starting BigQuery load job");
+
+    assertEquals(expectedTempTables, tempTables);
+  }
+
+  @Test
+  public void testWriteRename() throws Exception {
+    FakeBigQueryServices fakeBqServices = new FakeBigQueryServices()
+        .withJobService(new FakeJobService()
+            .startJobReturns("done", "done")
+            .pollJobReturns(Status.FAILED, Status.SUCCEEDED))
+        .withDatasetService(mockDatasetService);
+
+    final long numTempTables = 3;
+    final String jobIdToken = "jobIdToken";
+    final String jsonTable = "{}";
+    final List<String> tempTables = Lists.newArrayList();
+    for (long i = 0; i < numTempTables; ++i) {
+      tempTables.add(String.format("{\"tableId\":\"%s_%05d\"}", jobIdToken, i));
+    }
+
+    final PCollectionView<Iterable<String>> tempTablesView = PCollectionViews.iterableView(
+        TestPipeline.create(),
+        WindowingStrategy.globalDefault(),
+        StringUtf8Coder.of());
+
+    WriteRename writeRename = new WriteRename(
+        fakeBqServices,
+        jobIdToken,
+        jsonTable,
+        WriteDisposition.WRITE_EMPTY,
+        CreateDisposition.CREATE_IF_NEEDED,
+        tempTablesView);
+
+    DoFnTester<String, Void> tester = DoFnTester.of(writeRename);
+    tester.setSideInput(tempTablesView, GlobalWindow.INSTANCE, tempTables);
+    tester.processElement(null);
+
+    logged.verifyInfo("Starting BigQuery copy job");
+  }
 }


[25/51] [abbrv] incubator-beam git commit: Closes #786

Posted by ke...@apache.org.
Closes #786


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/b8d7559f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/b8d7559f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/b8d7559f

Branch: refs/heads/python-sdk
Commit: b8d7559f6737a0cc6cb747fb9907237ca1215a6d
Parents: 34d5012 8a2cf60
Author: Dan Halperin <dh...@google.com>
Authored: Thu Aug 4 11:46:18 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Aug 4 11:46:18 2016 -0700

----------------------------------------------------------------------
 sdks/java/build-tools/src/main/resources/beam/findbugs-filter.xml | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------



[21/51] [abbrv] incubator-beam git commit: Closes #775

Posted by ke...@apache.org.
Closes #775


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/595d2d4b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/595d2d4b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/595d2d4b

Branch: refs/heads/python-sdk
Commit: 595d2d4baf1f539cbd51d53ae2dffd8fdbd825f7
Parents: 3144363 032e1fa
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 23:03:54 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 23:03:54 2016 -0700

----------------------------------------------------------------------
 .../src/main/resources/beam/checkstyle.xml      |  5 +++++
 .../main/java/org/apache/beam/sdk/Pipeline.java |  2 +-
 .../org/apache/beam/sdk/coders/JAXBCoder.java   |  2 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |  3 +--
 .../org/apache/beam/sdk/io/FileBasedSink.java   |  2 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   |  6 +++---
 .../org/apache/beam/sdk/io/package-info.java    |  3 +--
 .../beam/sdk/options/PipelineOptions.java       |  2 +-
 .../sdk/options/PipelineOptionsFactory.java     |  2 +-
 .../beam/sdk/testing/SerializableMatchers.java  | 22 +++++++++++---------
 .../apache/beam/sdk/transforms/CombineFns.java  |  2 +-
 .../beam/sdk/transforms/DoFnReflector.java      |  4 ++--
 .../apache/beam/sdk/transforms/DoFnTester.java  |  2 +-
 .../org/apache/beam/sdk/transforms/Filter.java  |  5 ++---
 .../beam/sdk/transforms/SimpleFunction.java     |  2 +-
 .../windowing/AfterProcessingTime.java          |  5 +++--
 .../beam/sdk/transforms/windowing/PaneInfo.java |  2 +-
 .../beam/sdk/transforms/windowing/Window.java   |  2 +-
 .../org/apache/beam/sdk/util/TimeDomain.java    |  2 +-
 .../apache/beam/sdk/util/WindowingStrategy.java |  6 +++---
 .../util/common/ElementByteSizeObserver.java    |  2 +-
 .../CopyOnAccessInMemoryStateInternals.java     |  4 ++--
 .../sdk/util/state/StateInternalsFactory.java   |  4 ++--
 .../beam/sdk/util/state/StateNamespaces.java    |  2 +-
 .../apache/beam/sdk/util/state/StateTags.java   |  6 +++---
 .../apache/beam/sdk/values/PCollectionList.java |  2 +-
 .../org/apache/beam/sdk/values/POutput.java     |  2 +-
 .../org/apache/beam/sdk/values/TupleTag.java    |  2 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |  1 -
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |  4 +---
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |  2 +-
 31 files changed, 57 insertions(+), 55 deletions(-)
----------------------------------------------------------------------



[11/51] [abbrv] incubator-beam git commit: Port AutoComplete example from OldDoFn to DoFn

Posted by ke...@apache.org.
Port AutoComplete example from OldDoFn to DoFn


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3236eec2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3236eec2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3236eec2

Branch: refs/heads/python-sdk
Commit: 3236eec22a8902393e6becefb771b9a4768ccc50
Parents: 49d2f17
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:37 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700

----------------------------------------------------------------------
 .../beam/examples/complete/AutoComplete.java    | 30 ++++++++++----------
 1 file changed, 15 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3236eec2/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
index 7b44af8..1ab39c9 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
@@ -36,9 +36,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.StreamingOptions;
 import org.apache.beam.sdk.options.Validation;
 import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Filter;
 import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Partition;
@@ -130,8 +130,8 @@ public class AutoComplete {
 
         // Map the KV outputs of Count into our own CompletionCandiate class.
         .apply("CreateCompletionCandidates", ParDo.of(
-            new OldDoFn<KV<String, Long>, CompletionCandidate>() {
-              @Override
+            new DoFn<KV<String, Long>, CompletionCandidate>() {
+              @ProcessElement
               public void processElement(ProcessContext c) {
                 c.output(new CompletionCandidate(c.element().getKey(), c.element().getValue()));
               }
@@ -209,8 +209,8 @@ public class AutoComplete {
     }
 
     private static class FlattenTops
-        extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
-      @Override
+        extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+      @ProcessElement
       public void processElement(ProcessContext c) {
         for (CompletionCandidate cc : c.element().getValue()) {
           c.output(cc);
@@ -260,10 +260,10 @@ public class AutoComplete {
   }
 
   /**
-   * A OldDoFn that keys each candidate by all its prefixes.
+   * A DoFn that keys each candidate by all its prefixes.
    */
   private static class AllPrefixes
-      extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+      extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
     private final int minPrefix;
     private final int maxPrefix;
     public AllPrefixes(int minPrefix) {
@@ -273,8 +273,8 @@ public class AutoComplete {
       this.minPrefix = minPrefix;
       this.maxPrefix = maxPrefix;
     }
-    @Override
-      public void processElement(ProcessContext c) {
+    @ProcessElement
+    public void processElement(ProcessContext c) {
       String word = c.element().value;
       for (int i = minPrefix; i <= Math.min(word.length(), maxPrefix); i++) {
         c.output(KV.of(word.substring(0, i), c.element()));
@@ -341,8 +341,8 @@ public class AutoComplete {
   /**
    * Takes as input a set of strings, and emits each #hashtag found therein.
    */
-  static class ExtractHashtags extends OldDoFn<String, String> {
-    @Override
+  static class ExtractHashtags extends DoFn<String, String> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       Matcher m = Pattern.compile("#\\S+").matcher(c.element());
       while (m.find()) {
@@ -351,8 +351,8 @@ public class AutoComplete {
     }
   }
 
-  static class FormatForBigquery extends OldDoFn<KV<String, List<CompletionCandidate>>, TableRow> {
-    @Override
+  static class FormatForBigquery extends DoFn<KV<String, List<CompletionCandidate>>, TableRow> {
+    @ProcessElement
     public void processElement(ProcessContext c) {
       List<TableRow> completions = new ArrayList<>();
       for (CompletionCandidate cc : c.element().getValue()) {
@@ -385,14 +385,14 @@ public class AutoComplete {
    * Takes as input a the top candidates per prefix, and emits an entity
    * suitable for writing to Datastore.
    */
-  static class FormatForDatastore extends OldDoFn<KV<String, List<CompletionCandidate>>, Entity> {
+  static class FormatForDatastore extends DoFn<KV<String, List<CompletionCandidate>>, Entity> {
     private String kind;
 
     public FormatForDatastore(String kind) {
       this.kind = kind;
     }
 
-    @Override
+    @ProcessElement
     public void processElement(ProcessContext c) {
       Entity.Builder entityBuilder = Entity.newBuilder();
       Key key = DatastoreHelper.makeKey(kind, c.element().getKey()).build();


[04/51] [abbrv] incubator-beam git commit: Rename DoFn to OldDoFn

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
index 8a83e44..b27163a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
@@ -24,7 +24,7 @@ import static com.google.common.base.Preconditions.checkState;
 import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn.DelegatingAggregator;
+import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -127,7 +127,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
      *
      * <p>If invoked from {@link ProcessElement}), the timestamp
      * must not be older than the input element's timestamp minus
-     * {@link DoFn#getAllowedTimestampSkew}.  The output element will
+     * {@link OldDoFn#getAllowedTimestampSkew}.  The output element will
      * be in the same windows as the input element.
      *
      * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
@@ -176,7 +176,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
      *
      * <p>If invoked from {@link ProcessElement}), the timestamp
      * must not be older than the input element's timestamp minus
-     * {@link DoFn#getAllowedTimestampSkew}.  The output element will
+     * {@link OldDoFn#getAllowedTimestampSkew}.  The output element will
      * be in the same windows as the input element.
      *
      * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
@@ -194,7 +194,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
   }
 
   /**
-   * Information accessible when running {@link DoFn#processElement}.
+   * Information accessible when running {@link OldDoFn#processElement}.
    */
   public abstract class ProcessContext extends Context {
 
@@ -358,13 +358,13 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
   /**
    * Returns an {@link Aggregator} with aggregation logic specified by the
    * {@link CombineFn} argument. The name provided must be unique across
-   * {@link Aggregator}s created within the DoFn. Aggregators can only be created
+   * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
    * during pipeline construction.
    *
    * @param name the name of the aggregator
    * @param combiner the {@link CombineFn} to use in the aggregator
    * @return an aggregator for the provided name and combiner in the scope of
-   *         this DoFn
+   *         this OldDoFn
    * @throws NullPointerException if the name or combiner is null
    * @throws IllegalArgumentException if the given name collides with another
    *         aggregator in this scope
@@ -391,13 +391,13 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
   /**
    * Returns an {@link Aggregator} with the aggregation logic specified by the
    * {@link SerializableFunction} argument. The name provided must be unique
-   * across {@link Aggregator}s created within the DoFn. Aggregators can only be
+   * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
    * created during pipeline construction.
    *
    * @param name the name of the aggregator
    * @param combiner the {@link SerializableFunction} to use in the aggregator
    * @return an aggregator for the provided name and combiner in the scope of
-   *         this DoFn
+   *         this OldDoFn
    * @throws NullPointerException if the name or combiner is null
    * @throws IllegalArgumentException if the given name collides with another
    *         aggregator in this scope

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
index a31799e..4466874 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
@@ -202,7 +202,7 @@ public class Filter<T> extends PTransform<PCollection<T>, PCollection<T>> {
 
   @Override
   public PCollection<T> apply(PCollection<T> input) {
-    PCollection<T> output = input.apply(ParDo.of(new DoFn<T, T>() {
+    PCollection<T> output = input.apply(ParDo.of(new OldDoFn<T, T>() {
       @Override
       public void processElement(ProcessContext c) {
         if (predicate.apply(c.element()) == true) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
index 4f270a7..b48da38 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
@@ -133,7 +133,7 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
 
   @Override
   public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
+    return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
       private static final long serialVersionUID = 0L;
       @Override
       public void processElement(ProcessContext c) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
index 0b83fb6..53e898e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
@@ -174,7 +174,7 @@ public class Flatten {
       Coder<T> elemCoder = ((IterableLikeCoder<T, ?>) inCoder).getElemCoder();
 
       return in.apply("FlattenIterables", ParDo.of(
-          new DoFn<Iterable<T>, T>() {
+          new OldDoFn<Iterable<T>, T>() {
             @Override
             public void processElement(ProcessContext c) {
               for (T i : c.element()) {

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
index 8ad57d2..ed7f411 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
@@ -68,7 +68,7 @@ import org.apache.beam.sdk.values.PCollection.IsBounded;
  * PCollection<KV<String, Iterable<Doc>>> urlToDocs =
  *     urlDocPairs.apply(GroupByKey.<String, Doc>create());
  * PCollection<R> results =
- *     urlToDocs.apply(ParDo.of(new DoFn<KV<String, Iterable<Doc>>, R>() {
+ *     urlToDocs.apply(ParDo.of(new OldDoFn<KV<String, Iterable<Doc>>, R>() {
  *       public void processElement(ProcessContext c) {
  *         String url = c.element().getKey();
  *         Iterable<Doc> docsWithThatUrl = c.element().getValue();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
index ef1e3c6..b5fe60f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
@@ -40,7 +40,7 @@ import java.util.concurrent.Semaphore;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
- * Provides multi-threading of {@link DoFn}s, using threaded execution to
+ * Provides multi-threading of {@link OldDoFn}s, using threaded execution to
  * process multiple elements concurrently within a bundle.
  *
  * <p>Note, that each Dataflow worker will already process multiple bundles
@@ -57,7 +57,7 @@ import java.util.concurrent.atomic.AtomicReference;
  * share of the maximum write rate) will take at least 6 seconds to complete (there is additional
  * overhead in the extra parallelization).
  *
- * <p>To parallelize a {@link DoFn} to 10 threads:
+ * <p>To parallelize a {@link OldDoFn} to 10 threads:
  * <pre>{@code
  * PCollection<T> data = ...;
  * data.apply(
@@ -65,18 +65,18 @@ import java.util.concurrent.atomic.AtomicReference;
  *                             .withMaxParallelism(10)));
  * }</pre>
  *
- * <p>An uncaught exception from the wrapped {@link DoFn} will result in the exception
+ * <p>An uncaught exception from the wrapped {@link OldDoFn} will result in the exception
  * being rethrown in later calls to {@link MultiThreadedIntraBundleProcessingDoFn#processElement}
  * or a call to {@link MultiThreadedIntraBundleProcessingDoFn#finishBundle}.
  */
 public class IntraBundleParallelization {
   /**
    * Creates a {@link IntraBundleParallelization} {@link PTransform} for the given
-   * {@link DoFn} that processes elements using multiple threads.
+   * {@link OldDoFn} that processes elements using multiple threads.
    *
    * <p>Note that the specified {@code doFn} needs to be thread safe.
    */
-  public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> doFn) {
+  public static <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> doFn) {
     return new Unbound().of(doFn);
   }
 
@@ -92,7 +92,7 @@ public class IntraBundleParallelization {
    * An incomplete {@code IntraBundleParallelization} transform, with unbound input/output types.
    *
    * <p>Before being applied, {@link IntraBundleParallelization.Unbound#of} must be
-   * invoked to specify the {@link DoFn} to invoke, which will also
+   * invoked to specify the {@link OldDoFn} to invoke, which will also
    * bind the input/output types of this {@code PTransform}.
    */
   public static class Unbound {
@@ -118,18 +118,18 @@ public class IntraBundleParallelization {
 
     /**
      * Returns a new {@link IntraBundleParallelization} {@link PTransform} like this one
-     * with the specified {@link DoFn}.
+     * with the specified {@link OldDoFn}.
      *
      * <p>Note that the specified {@code doFn} needs to be thread safe.
      */
-    public <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> doFn) {
+    public <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> doFn) {
       return new Bound<>(doFn, maxParallelism);
     }
   }
 
   /**
    * A {@code PTransform} that, when applied to a {@code PCollection<InputT>},
-   * invokes a user-specified {@code DoFn<InputT, OutputT>} on all its elements,
+   * invokes a user-specified {@code OldDoFn<InputT, OutputT>} on all its elements,
    * with all its outputs collected into an output
    * {@code PCollection<OutputT>}.
    *
@@ -140,10 +140,10 @@ public class IntraBundleParallelization {
    */
   public static class Bound<InputT, OutputT>
       extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
-    private final DoFn<InputT, OutputT> doFn;
+    private final OldDoFn<InputT, OutputT> doFn;
     private final int maxParallelism;
 
-    Bound(DoFn<InputT, OutputT> doFn, int maxParallelism) {
+    Bound(OldDoFn<InputT, OutputT> doFn, int maxParallelism) {
       checkArgument(maxParallelism > 0,
           "Expected parallelism factor greater than zero, received %s.", maxParallelism);
       this.doFn = doFn;
@@ -160,12 +160,12 @@ public class IntraBundleParallelization {
 
     /**
      * Returns a new {@link IntraBundleParallelization} {@link PTransform} like this one
-     * with the specified {@link DoFn}.
+     * with the specified {@link OldDoFn}.
      *
      * <p>Note that the specified {@code doFn} needs to be thread safe.
      */
     public <NewInputT, NewOutputT> Bound<NewInputT, NewOutputT>
-        of(DoFn<NewInputT, NewOutputT> doFn) {
+        of(OldDoFn<NewInputT, NewOutputT> doFn) {
       return new Bound<>(doFn, maxParallelism);
     }
 
@@ -188,17 +188,19 @@ public class IntraBundleParallelization {
   }
 
   /**
-   * A multi-threaded {@code DoFn} wrapper.
+   * A multi-threaded {@code OldDoFn} wrapper.
    *
-   * @see IntraBundleParallelization#of(DoFn)
+   * @see IntraBundleParallelization#of(OldDoFn)
    *
    * @param <InputT> the type of the (main) input elements
    * @param <OutputT> the type of the (main) output elements
    */
   public static class MultiThreadedIntraBundleProcessingDoFn<InputT, OutputT>
-      extends DoFn<InputT, OutputT> {
+      extends OldDoFn<InputT, OutputT> {
 
-    public MultiThreadedIntraBundleProcessingDoFn(DoFn<InputT, OutputT> doFn, int maxParallelism) {
+    public MultiThreadedIntraBundleProcessingDoFn(
+        OldDoFn<InputT, OutputT> doFn,
+        int maxParallelism) {
       checkArgument(maxParallelism > 0,
           "Expected parallelism factor greater than zero, received %s.", maxParallelism);
       this.doFn = doFn;
@@ -267,7 +269,7 @@ public class IntraBundleParallelization {
     /////////////////////////////////////////////////////////////////////////////
 
     /**
-     * Wraps a DoFn context, forcing single-thread output so that threads don't
+     * Wraps a OldDoFn context, forcing single-thread output so that threads don't
      * propagate through to downstream functions.
      */
     private class WrappedContext extends ProcessContext {
@@ -347,7 +349,7 @@ public class IntraBundleParallelization {
       }
     }
 
-    private final DoFn<InputT, OutputT> doFn;
+    private final OldDoFn<InputT, OutputT> doFn;
     private int maxParallelism;
 
     private transient ExecutorService executor;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
index 636e306..c8cbce8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
@@ -58,7 +58,7 @@ public class Keys<K> extends PTransform<PCollection<? extends KV<K, ?>>,
   @Override
   public PCollection<K> apply(PCollection<? extends KV<K, ?>> in) {
     return
-        in.apply("Keys", ParDo.of(new DoFn<KV<K, ?>, K>() {
+        in.apply("Keys", ParDo.of(new OldDoFn<KV<K, ?>, K>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(c.element().getKey());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
index 9597c92..430d37b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
@@ -62,7 +62,7 @@ public class KvSwap<K, V> extends PTransform<PCollection<KV<K, V>>,
   @Override
   public PCollection<KV<V, K>> apply(PCollection<KV<K, V>> in) {
     return
-        in.apply("KvSwap", ParDo.of(new DoFn<KV<K, V>, KV<V, K>>() {
+        in.apply("KvSwap", ParDo.of(new OldDoFn<KV<K, V>, KV<V, K>>() {
           @Override
           public void processElement(ProcessContext c) {
             KV<K, V> e = c.element();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
index f535111..c83c39f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
@@ -104,7 +104,7 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
 
   @Override
   public PCollection<OutputT> apply(PCollection<InputT> input) {
-    return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
+    return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
       @Override
       public void processElement(ProcessContext c) {
         c.output(fn.apply(c.element()));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
new file mode 100644
index 0000000..48c6033
--- /dev/null
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
@@ -0,0 +1,565 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.annotations.Experimental.Kind;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.transforms.display.HasDisplayData;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowingInternals;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TypeDescriptor;
+
+import com.google.common.base.MoreObjects;
+
+import org.joda.time.Duration;
+import org.joda.time.Instant;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+
+/**
+ * The argument to {@link ParDo} providing the code to use to process
+ * elements of the input
+ * {@link org.apache.beam.sdk.values.PCollection}.
+ *
+ * <p>See {@link ParDo} for more explanation, examples of use, and
+ * discussion of constraints on {@code OldDoFn}s, including their
+ * serializability, lack of access to global shared mutable state,
+ * requirements for failure tolerance, and benefits of optimization.
+ *
+ * <p>{@code OldDoFn}s can be tested in the context of a particular
+ * {@code Pipeline} by running that {@code Pipeline} on sample input
+ * and then checking its output.  Unit testing of a {@code OldDoFn},
+ * separately from any {@code ParDo} transform or {@code Pipeline},
+ * can be done via the {@link DoFnTester} harness.
+ *
+ * <p>{@link DoFnWithContext} (currently experimental) offers an alternative
+ * mechanism for accessing {@link ProcessContext#window()} without the need
+ * to implement {@link RequiresWindowAccess}.
+ *
+ * <p>See also {@link #processElement} for details on implementing the transformation
+ * from {@code InputT} to {@code OutputT}.
+ *
+ * @param <InputT> the type of the (main) input elements
+ * @param <OutputT> the type of the (main) output elements
+ */
+public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDisplayData {
+
+  /**
+   * Information accessible to all methods in this {@code OldDoFn}.
+   * Used primarily to output elements.
+   */
+  public abstract class Context {
+
+    /**
+     * Returns the {@code PipelineOptions} specified with the
+     * {@link org.apache.beam.sdk.runners.PipelineRunner}
+     * invoking this {@code OldDoFn}.  The {@code PipelineOptions} will
+     * be the default running via {@link DoFnTester}.
+     */
+    public abstract PipelineOptions getPipelineOptions();
+
+    /**
+     * Adds the given element to the main output {@code PCollection}.
+     *
+     * <p>Once passed to {@code output} the element should be considered
+     * immutable and not be modified in any way. It may be cached or retained
+     * by the Dataflow runtime or later steps in the pipeline, or used in
+     * other unspecified ways.
+     *
+     * <p>If invoked from {@link OldDoFn#processElement processElement}, the output
+     * element will have the same timestamp and be in the same windows
+     * as the input element passed to {@link OldDoFn#processElement processElement}.
+     *
+     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+     * this will attempt to use the
+     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+     * of the input {@code PCollection} to determine what windows the element
+     * should be in, throwing an exception if the {@code WindowFn} attempts
+     * to access any information about the input element. The output element
+     * will have a timestamp of negative infinity.
+     */
+    public abstract void output(OutputT output);
+
+    /**
+     * Adds the given element to the main output {@code PCollection},
+     * with the given timestamp.
+     *
+     * <p>Once passed to {@code outputWithTimestamp} the element should not be
+     * modified in any way.
+     *
+     * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp
+     * must not be older than the input element's timestamp minus
+     * {@link OldDoFn#getAllowedTimestampSkew getAllowedTimestampSkew}.  The output element will
+     * be in the same windows as the input element.
+     *
+     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+     * this will attempt to use the
+     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+     * of the input {@code PCollection} to determine what windows the element
+     * should be in, throwing an exception if the {@code WindowFn} attempts
+     * to access any information about the input element except for the
+     * timestamp.
+     */
+    public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
+
+    /**
+     * Adds the given element to the side output {@code PCollection} with the
+     * given tag.
+     *
+     * <p>Once passed to {@code sideOutput} the element should not be modified
+     * in any way.
+     *
+     * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags withOutputTags} to
+     * specify the tags of side outputs that it consumes. Non-consumed side
+     * outputs, e.g., outputs for monitoring purposes only, don't necessarily
+     * need to be specified.
+     *
+     * <p>The output element will have the same timestamp and be in the same
+     * windows as the input element passed to {@link OldDoFn#processElement processElement}.
+     *
+     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+     * this will attempt to use the
+     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+     * of the input {@code PCollection} to determine what windows the element
+     * should be in, throwing an exception if the {@code WindowFn} attempts
+     * to access any information about the input element. The output element
+     * will have a timestamp of negative infinity.
+     *
+     * @see ParDo#withOutputTags
+     */
+    public abstract <T> void sideOutput(TupleTag<T> tag, T output);
+
+    /**
+     * Adds the given element to the specified side output {@code PCollection},
+     * with the given timestamp.
+     *
+     * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
+     * modified in any way.
+     *
+     * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp
+     * must not be older than the input element's timestamp minus
+     * {@link OldDoFn#getAllowedTimestampSkew getAllowedTimestampSkew}.  The output element will
+     * be in the same windows as the input element.
+     *
+     * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+     * this will attempt to use the
+     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+     * of the input {@code PCollection} to determine what windows the element
+     * should be in, throwing an exception if the {@code WindowFn} attempts
+     * to access any information about the input element except for the
+     * timestamp.
+     *
+     * @see ParDo#withOutputTags
+     */
+    public abstract <T> void sideOutputWithTimestamp(
+        TupleTag<T> tag, T output, Instant timestamp);
+
+    /**
+     * Creates an {@link Aggregator} in the {@link OldDoFn} context with the
+     * specified name and aggregation logic specified by {@link CombineFn}.
+     *
+     * <p>For internal use only.
+     *
+     * @param name the name of the aggregator
+     * @param combiner the {@link CombineFn} to use in the aggregator
+     * @return an aggregator for the provided name and {@link CombineFn} in this
+     *         context
+     */
+    @Experimental(Kind.AGGREGATOR)
+    protected abstract <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+        createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner);
+
+    /**
+     * Sets up {@link Aggregator}s created by the {@link OldDoFn} so they are
+     * usable within this context.
+     *
+     * <p>This method should be called by runners before {@link OldDoFn#startBundle}
+     * is executed.
+     */
+    @Experimental(Kind.AGGREGATOR)
+    protected final void setupDelegateAggregators() {
+      for (DelegatingAggregator<?, ?> aggregator : aggregators.values()) {
+        setupDelegateAggregator(aggregator);
+      }
+
+      aggregatorsAreFinal = true;
+    }
+
+    private final <AggInputT, AggOutputT> void setupDelegateAggregator(
+        DelegatingAggregator<AggInputT, AggOutputT> aggregator) {
+
+      Aggregator<AggInputT, AggOutputT> delegate = createAggregatorInternal(
+          aggregator.getName(), aggregator.getCombineFn());
+
+      aggregator.setDelegate(delegate);
+    }
+  }
+
+  /**
+   * Information accessible when running {@link OldDoFn#processElement}.
+   */
+  public abstract class ProcessContext extends Context {
+
+    /**
+     * Returns the input element to be processed.
+     *
+     * <p>The element should be considered immutable. The Dataflow runtime will not mutate the
+     * element, so it is safe to cache, etc. The element should not be mutated by any of the
+     * {@link OldDoFn} methods, because it may be cached elsewhere, retained by the Dataflow
+     * runtime, or used in other unspecified ways.
+     */
+    public abstract InputT element();
+
+    /**
+     * Returns the value of the side input for the window corresponding to the
+     * window of the main input element.
+     *
+     * <p>See
+     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn#getSideInputWindow}
+     * for how this corresponding window is determined.
+     *
+     * @throws IllegalArgumentException if this is not a side input
+     * @see ParDo#withSideInputs
+     */
+    public abstract <T> T sideInput(PCollectionView<T> view);
+
+    /**
+     * Returns the timestamp of the input element.
+     *
+     * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
+     * for more information.
+     */
+    public abstract Instant timestamp();
+
+    /**
+     * Returns the window into which the input element has been assigned.
+     *
+     * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
+     * for more information.
+     *
+     * @throws UnsupportedOperationException if this {@link OldDoFn} does
+     * not implement {@link RequiresWindowAccess}.
+     */
+    public abstract BoundedWindow window();
+
+    /**
+     * Returns information about the pane within this window into which the
+     * input element has been assigned.
+     *
+     * <p>Generally all data is in a single, uninteresting pane unless custom
+     * triggering and/or late data has been explicitly requested.
+     * See {@link org.apache.beam.sdk.transforms.windowing.Window}
+     * for more information.
+     */
+    public abstract PaneInfo pane();
+
+    /**
+     * Returns the process context to use for implementing windowing.
+     */
+    @Experimental
+    public abstract WindowingInternals<InputT, OutputT> windowingInternals();
+  }
+
+  /**
+   * Returns the allowed timestamp skew duration, which is the maximum
+   * duration that timestamps can be shifted backward in
+   * {@link OldDoFn.Context#outputWithTimestamp}.
+   *
+   * <p>The default value is {@code Duration.ZERO}, in which case
+   * timestamps can only be shifted forward to future.  For infinite
+   * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
+   *
+   * <p> Note that producing an element whose timestamp is less than the
+   * current timestamp may result in late data, i.e. returning a non-zero
+   * value here does not impact watermark calculations used for firing
+   * windows.
+   *
+   * @deprecated does not interact well with the watermark.
+   */
+  @Deprecated
+  public Duration getAllowedTimestampSkew() {
+    return Duration.ZERO;
+  }
+
+  /**
+   * Interface for signaling that a {@link OldDoFn} needs to access the window the
+   * element is being processed in, via {@link OldDoFn.ProcessContext#window}.
+   */
+  @Experimental
+  public interface RequiresWindowAccess {}
+
+  public OldDoFn() {
+    this(new HashMap<String, DelegatingAggregator<?, ?>>());
+  }
+
+  OldDoFn(Map<String, DelegatingAggregator<?, ?>> aggregators) {
+    this.aggregators = aggregators;
+  }
+
+  /////////////////////////////////////////////////////////////////////////////
+
+  private final Map<String, DelegatingAggregator<?, ?>> aggregators;
+
+  /**
+   * Protects aggregators from being created after initialization.
+   */
+  private boolean aggregatorsAreFinal;
+
+  /**
+   * Prepares this {@code OldDoFn} instance for processing a batch of elements.
+   *
+   * <p>By default, does nothing.
+   */
+  public void startBundle(Context c) throws Exception {
+  }
+
+  /**
+   * Processes one input element.
+   *
+   * <p>The current element of the input {@code PCollection} is returned by
+   * {@link ProcessContext#element() c.element()}. It should be considered immutable. The Dataflow
+   * runtime will not mutate the element, so it is safe to cache, etc. The element should not be
+   * mutated by any of the {@link OldDoFn} methods, because it may be cached elsewhere, retained by
+   * the Dataflow runtime, or used in other unspecified ways.
+   *
+   * <p>A value is added to the main output {@code PCollection} by {@link ProcessContext#output}.
+   * Once passed to {@code output} the element should be considered immutable and not be modified in
+   * any way. It may be cached elsewhere, retained by the Dataflow runtime, or used in other
+   * unspecified ways.
+   *
+   * @see ProcessContext
+   */
+  public abstract void processElement(ProcessContext c) throws Exception;
+
+  /**
+   * Finishes processing this batch of elements.
+   *
+   * <p>By default, does nothing.
+   */
+  public void finishBundle(Context c) throws Exception {
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * <p>By default, does not register any display data. Implementors may override this method
+   * to provide their own display data.
+   */
+  @Override
+  public void populateDisplayData(DisplayData.Builder builder) {
+  }
+
+  /////////////////////////////////////////////////////////////////////////////
+
+  /**
+   * Returns a {@link TypeDescriptor} capturing what is known statically
+   * about the input type of this {@code OldDoFn} instance's most-derived
+   * class.
+   *
+   * <p>See {@link #getOutputTypeDescriptor} for more discussion.
+   */
+  protected TypeDescriptor<InputT> getInputTypeDescriptor() {
+    return new TypeDescriptor<InputT>(getClass()) {};
+  }
+
+  /**
+   * Returns a {@link TypeDescriptor} capturing what is known statically
+   * about the output type of this {@code OldDoFn} instance's
+   * most-derived class.
+   *
+   * <p>In the normal case of a concrete {@code OldDoFn} subclass with
+   * no generic type parameters of its own (including anonymous inner
+   * classes), this will be a complete non-generic type, which is good
+   * for choosing a default output {@code Coder<OutputT>} for the output
+   * {@code PCollection<OutputT>}.
+   */
+  protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+    return new TypeDescriptor<OutputT>(getClass()) {};
+  }
+
+  /**
+   * Returns an {@link Aggregator} with aggregation logic specified by the
+   * {@link CombineFn} argument. The name provided must be unique across
+   * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
+   * during pipeline construction.
+   *
+   * @param name the name of the aggregator
+   * @param combiner the {@link CombineFn} to use in the aggregator
+   * @return an aggregator for the provided name and combiner in the scope of
+   *         this OldDoFn
+   * @throws NullPointerException if the name or combiner is null
+   * @throws IllegalArgumentException if the given name collides with another
+   *         aggregator in this scope
+   * @throws IllegalStateException if called during pipeline processing.
+   */
+  protected final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+      createAggregator(String name, CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
+    checkNotNull(name, "name cannot be null");
+    checkNotNull(combiner, "combiner cannot be null");
+    checkArgument(!aggregators.containsKey(name),
+        "Cannot create aggregator with name %s."
+        + " An Aggregator with that name already exists within this scope.",
+        name);
+
+    checkState(!aggregatorsAreFinal, "Cannot create an aggregator during OldDoFn processing."
+        + " Aggregators should be registered during pipeline construction.");
+
+    DelegatingAggregator<AggInputT, AggOutputT> aggregator =
+        new DelegatingAggregator<>(name, combiner);
+    aggregators.put(name, aggregator);
+    return aggregator;
+  }
+
+  /**
+   * Returns an {@link Aggregator} with the aggregation logic specified by the
+   * {@link SerializableFunction} argument. The name provided must be unique
+   * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
+   * created during pipeline construction.
+   *
+   * @param name the name of the aggregator
+   * @param combiner the {@link SerializableFunction} to use in the aggregator
+   * @return an aggregator for the provided name and combiner in the scope of
+   *         this OldDoFn
+   * @throws NullPointerException if the name or combiner is null
+   * @throws IllegalArgumentException if the given name collides with another
+   *         aggregator in this scope
+   * @throws IllegalStateException if called during pipeline processing.
+   */
+  protected final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(String name,
+      SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
+    checkNotNull(combiner, "combiner cannot be null.");
+    return createAggregator(name, Combine.IterableCombineFn.of(combiner));
+  }
+
+  /**
+   * Returns the {@link Aggregator Aggregators} created by this {@code OldDoFn}.
+   */
+  Collection<Aggregator<?, ?>> getAggregators() {
+    return Collections.<Aggregator<?, ?>>unmodifiableCollection(aggregators.values());
+  }
+
+  /**
+   * An {@link Aggregator} that delegates calls to addValue to another
+   * aggregator.
+   *
+   * @param <AggInputT> the type of input element
+   * @param <AggOutputT> the type of output element
+   */
+  static class DelegatingAggregator<AggInputT, AggOutputT> implements
+      Aggregator<AggInputT, AggOutputT>, Serializable {
+    private final UUID id;
+
+    private final String name;
+
+    private final CombineFn<AggInputT, ?, AggOutputT> combineFn;
+
+    private Aggregator<AggInputT, ?> delegate;
+
+    public DelegatingAggregator(String name,
+        CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
+      this.id = UUID.randomUUID();
+      this.name = checkNotNull(name, "name cannot be null");
+      // Safe contravariant cast
+      @SuppressWarnings("unchecked")
+      CombineFn<AggInputT, ?, AggOutputT> specificCombiner =
+          (CombineFn<AggInputT, ?, AggOutputT>) checkNotNull(combiner, "combineFn cannot be null");
+      this.combineFn = specificCombiner;
+    }
+
+    @Override
+    public void addValue(AggInputT value) {
+      if (delegate == null) {
+        throw new IllegalStateException(
+            "addValue cannot be called on Aggregator outside of the execution of a OldDoFn.");
+      } else {
+        delegate.addValue(value);
+      }
+    }
+
+    @Override
+    public String getName() {
+      return name;
+    }
+
+    @Override
+    public CombineFn<AggInputT, ?, AggOutputT> getCombineFn() {
+      return combineFn;
+    }
+
+    /**
+     * Sets the current delegate of the Aggregator.
+     *
+     * @param delegate the delegate to set in this aggregator
+     */
+    public void setDelegate(Aggregator<AggInputT, ?> delegate) {
+      this.delegate = delegate;
+    }
+
+    @Override
+    public String toString() {
+      return MoreObjects.toStringHelper(getClass())
+          .add("name", name)
+          .add("combineFn", combineFn)
+          .toString();
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(id, name, combineFn.getClass());
+    }
+
+    /**
+     * Indicates whether some other object is "equal to" this one.
+     *
+     * <p>{@code DelegatingAggregator} instances are equal if they have the same name, their
+     * CombineFns are the same class, and they have identical IDs.
+     */
+    @Override
+    public boolean equals(Object o) {
+      if (o == this) {
+        return true;
+      }
+      if (o == null) {
+        return false;
+      }
+      if (o instanceof DelegatingAggregator) {
+        DelegatingAggregator<?, ?> that = (DelegatingAggregator<?, ?>) o;
+        return Objects.equals(this.id, that.id)
+            && Objects.equals(this.name, that.name)
+            && Objects.equals(this.combineFn.getClass(), that.combineFn.getClass());
+      }
+      return false;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
index fe6e8ad..12ab54d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
@@ -147,7 +147,7 @@ import java.io.Serializable;
  * implementing {@code Serializable}.
  *
  * <p>{@code PTransform} is marked {@code Serializable} solely
- * because it is common for an anonymous {@code DoFn},
+ * because it is common for an anonymous {@code OldDoFn}
  * instance to be created within an
  * {@code apply()} method of a composite {@code PTransform}.
  *

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index 16dfcac..36d8101 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -52,13 +52,13 @@ import java.util.List;
  * <p>The {@link ParDo} processing style is similar to what happens inside
  * the "Mapper" or "Reducer" class of a MapReduce-style algorithm.
  *
- * <h2>{@link DoFn DoFns}</h2>
+ * <h2>{@link OldDoFn DoFns}</h2>
  *
  * <p>The function to use to process each element is specified by a
- * {@link DoFn DoFn&lt;InputT, OutputT&gt;}, primarily via its
- * {@link DoFn#processElement processElement} method. The {@link DoFn} may also
- * override the default implementations of {@link DoFn#startBundle startBundle}
- * and {@link DoFn#finishBundle finishBundle}.
+ * {@link OldDoFn OldDoFn&lt;InputT, OutputT&gt;}, primarily via its
+ * {@link OldDoFn#processElement processElement} method. The {@link OldDoFn} may also
+ * override the default implementations of {@link OldDoFn#startBundle startBundle}
+ * and {@link OldDoFn#finishBundle finishBundle}.
  *
  * <p>Conceptually, when a {@link ParDo} transform is executed, the
  * elements of the input {@link PCollection} are first divided up
@@ -67,26 +67,27 @@ import java.util.List;
  * For each bundle of input elements processing proceeds as follows:
  *
  * <ol>
- *   <li>If required, a fresh instance of the argument {@link DoFn} is created
+ *   <li>If required, a fresh instance of the argument {@link OldDoFn} is created
  *     on a worker. This may be through deserialization or other means. A
- *     {@link PipelineRunner} may reuse {@link DoFn} instances for multiple bundles.
- *     A {@link DoFn} that has terminated abnormally (by throwing an {@link Exception}
+ *     {@link PipelineRunner} may reuse {@link OldDoFn} instances for multiple bundles.
+ *     An {@link OldDoFn} that has terminated abnormally (by throwing an {@link Exception})
  *     will never be reused.</li>
- *   <li>The {@link DoFn DoFn's} {@link DoFn#startBundle} method is called to
+ *   <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#startBundle} method is called to
  *     initialize it. If this method is not overridden, the call may be optimized
  *     away.</li>
- *   <li>The {@link DoFn DoFn's} {@link DoFn#processElement} method
+ *   <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#processElement} method
  *     is called on each of the input elements in the bundle.</li>
- *   <li>The {@link DoFn DoFn's} {@link DoFn#finishBundle} method is called
- *     to complete its work. After {@link DoFn#finishBundle} is called, the
- *     framework will not again invoke {@link DoFn#processElement} or {@link DoFn#finishBundle}
- *     until a new call to {@link DoFn#startBundle} has occurred.
+ *   <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#finishBundle} method is called
+ *     to complete its work. After {@link OldDoFn#finishBundle} is called, the
+ *     framework will not again invoke {@link OldDoFn#processElement} or
+ *     {@link OldDoFn#finishBundle}
+ *     until a new call to {@link OldDoFn#startBundle} has occurred.
  *     If this method is not overridden, this call may be optimized away.</li>
  * </ol>
  *
- * Each of the calls to any of the {@link DoFn DoFn's} processing
+ * Each of the calls to any of the {@link OldDoFn OldDoFn's} processing
  * methods can produce zero or more output elements. All of the
- * of output elements from all of the {@link DoFn} instances
+ * output elements from all of the {@link OldDoFn} instances
  * are included in the output {@link PCollection}.
  *
  * <p>For example:
@@ -94,7 +95,7 @@ import java.util.List;
  * <pre> {@code
  * PCollection<String> lines = ...;
  * PCollection<String> words =
- *     lines.apply(ParDo.of(new DoFn<String, String>() {
+ *     lines.apply(ParDo.of(new OldDoFn<String, String>() {
  *         public void processElement(ProcessContext c) {
  *           String line = c.element();
  *           for (String word : line.split("[^a-zA-Z']+")) {
@@ -102,7 +103,7 @@ import java.util.List;
  *           }
  *         }}));
  * PCollection<Integer> wordLengths =
- *     words.apply(ParDo.of(new DoFn<String, Integer>() {
+ *     words.apply(ParDo.of(new OldDoFn<String, Integer>() {
  *         public void processElement(ProcessContext c) {
  *           String word = c.element();
  *           Integer length = word.length();
@@ -127,9 +128,9 @@ import java.util.List;
  *
  * <pre> {@code
  * PCollection<String> words =
- *     lines.apply("ExtractWords", ParDo.of(new DoFn<String, String>() { ... }));
+ *     lines.apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() { ... }));
  * PCollection<Integer> wordLengths =
- *     words.apply("ComputeWordLengths", ParDo.of(new DoFn<String, Integer>() { ... }));
+ *     words.apply("ComputeWordLengths", ParDo.of(new OldDoFn<String, Integer>() { ... }));
  * } </pre>
  *
  * <h2>Side Inputs</h2>
@@ -141,7 +142,7 @@ import java.util.List;
  * {@link PCollection PCollections} computed by earlier pipeline operations,
  * passed in to the {@link ParDo} transform using
  * {@link #withSideInputs}, and their contents accessible to each of
- * the {@link DoFn} operations via {@link DoFn.ProcessContext#sideInput sideInput}.
+ * the {@link OldDoFn} operations via {@link OldDoFn.ProcessContext#sideInput sideInput}.
  * For example:
  *
  * <pre> {@code
@@ -151,7 +152,7 @@ import java.util.List;
  *     maxWordLengthCutOff.apply(View.<Integer>asSingleton());
  * PCollection<String> wordsBelowCutOff =
  *     words.apply(ParDo.withSideInputs(maxWordLengthCutOffView)
- *                      .of(new DoFn<String, String>() {
+ *                      .of(new OldDoFn<String, String>() {
  *         public void processElement(ProcessContext c) {
  *           String word = c.element();
  *           int lengthCutOff = c.sideInput(maxWordLengthCutOffView);
@@ -170,11 +171,11 @@ import java.util.List;
  * and bundled in a {@link PCollectionTuple}. The {@link TupleTag TupleTags}
  * to be used for the output {@link PCollectionTuple} are specified by
  * invoking {@link #withOutputTags}. Unconsumed side outputs do not
- * necessarily need to be explicitly specified, even if the {@link DoFn}
- * generates them. Within the {@link DoFn}, an element is added to the
+ * necessarily need to be explicitly specified, even if the {@link OldDoFn}
+ * generates them. Within the {@link OldDoFn}, an element is added to the
  * main output {@link PCollection} as normal, using
- * {@link DoFn.Context#output}, while an element is added to a side output
- * {@link PCollection} using {@link DoFn.Context#sideOutput}. For example:
+ * {@link OldDoFn.Context#output}, while an element is added to a side output
+ * {@link PCollection} using {@link OldDoFn.Context#sideOutput}. For example:
  *
  * <pre> {@code
  * PCollection<String> words = ...;
@@ -197,7 +198,7 @@ import java.util.List;
  *         .withOutputTags(wordsBelowCutOffTag,
  *                         TupleTagList.of(wordLengthsAboveCutOffTag)
  *                                     .and(markedWordsTag))
- *         .of(new DoFn<String, String>() {
+ *         .of(new OldDoFn<String, String>() {
  *             // Create a tag for the unconsumed side output.
  *             final TupleTag<String> specialWordsTag =
  *                 new TupleTag<String>(){};
@@ -232,7 +233,7 @@ import java.util.List;
  *
  * <p>Several properties can be specified for a {@link ParDo}
  * {@link PTransform}, including name, side inputs, side output tags,
- * and {@link DoFn} to invoke. Only the {@link DoFn} is required; the
+ * and {@link OldDoFn} to invoke. Only the {@link OldDoFn} is required; the
  * name is encouraged but not required, and side inputs and side
  * output tags are only specified when they're needed. These
  * properties can be specified in any order, as long as they're
@@ -246,23 +247,23 @@ import java.util.List;
  * {@link ParDo.Bound} nested classes, each of which offer
  * property setter instance methods to enable setting additional
  * properties. {@link ParDo.Bound} is used for {@link ParDo}
- * transforms whose {@link DoFn} is specified and whose input and
+ * transforms whose {@link OldDoFn} is specified and whose input and
  * output static types have been bound. {@link ParDo.Unbound ParDo.Unbound} is used
  * for {@link ParDo} transforms that have not yet had their
- * {@link DoFn} specified. Only {@link ParDo.Bound} instances can be
+ * {@link OldDoFn} specified. Only {@link ParDo.Bound} instances can be
  * applied.
  *
  * <p>Another benefit of this approach is that it reduces the number
  * of type parameters that need to be specified manually. In
  * particular, the input and output types of the {@link ParDo}
  * {@link PTransform} are inferred automatically from the type
- * parameters of the {@link DoFn} argument passed to {@link ParDo#of}.
+ * parameters of the {@link OldDoFn} argument passed to {@link ParDo#of}.
  *
  * <h2>Output Coders</h2>
  *
  * <p>By default, the {@link Coder Coder&lt;OutputT&gt;} for the
  * elements of the main output {@link PCollection PCollection&lt;OutputT&gt;} is
- * inferred from the concrete type of the {@link DoFn DoFn&lt;InputT, OutputT&gt;}.
+ * inferred from the concrete type of the {@link OldDoFn OldDoFn&lt;InputT, OutputT&gt;}.
  *
  * <p>By default, the {@link Coder Coder&lt;SideOutputT&gt;} for the elements of
  * a side output {@link PCollection PCollection&lt;SideOutputT&gt;} is inferred
@@ -282,74 +283,74 @@ import java.util.List;
  * This style of {@code TupleTag} instantiation is used in the example of
  * multiple side outputs, above.
  *
- * <h2>Serializability of {@link DoFn DoFns}</h2>
+ * <h2>Serializability of {@link OldDoFn DoFns}</h2>
  *
- * <p>A {@link DoFn} passed to a {@link ParDo} transform must be
- * {@link Serializable}. This allows the {@link DoFn} instance
+ * <p>An {@link OldDoFn} passed to a {@link ParDo} transform must be
+ * {@link Serializable}. This allows the {@link OldDoFn} instance
  * created in this "main program" to be sent (in serialized form) to
  * remote worker machines and reconstituted for bundles of elements
- * of the input {@link PCollection} being processed. A {@link DoFn}
+ * of the input {@link PCollection} being processed. An {@link OldDoFn}
  * can have instance variable state, and non-transient instance
  * variable state will be serialized in the main program and then
  * deserialized on remote worker machines for some number of bundles
  * of elements to process.
  *
- * <p>{@link DoFn DoFns} expressed as anonymous inner classes can be
+ * <p>{@link OldDoFn DoFns} expressed as anonymous inner classes can be
  * convenient, but due to a quirk in Java's rules for serializability,
  * non-static inner or nested classes (including anonymous inner
  * classes) automatically capture their enclosing class's instance in
  * their serialized state. This can lead to including much more than
- * intended in the serialized state of a {@link DoFn}, or even things
+ * intended in the serialized state of a {@link OldDoFn}, or even things
  * that aren't {@link Serializable}.
  *
  * <p>There are two ways to avoid unintended serialized state in a
- * {@link DoFn}:
+ * {@link OldDoFn}:
  *
  * <ul>
  *
- * <li>Define the {@link DoFn} as a named, static class.
+ * <li>Define the {@link OldDoFn} as a named, static class.
  *
- * <li>Define the {@link DoFn} as an anonymous inner class inside of
+ * <li>Define the {@link OldDoFn} as an anonymous inner class inside of
  * a static method.
  *
  * </ul>
  *
  * <p>Both of these approaches ensure that there is no implicit enclosing
- * instance serialized along with the {@link DoFn} instance.
+ * instance serialized along with the {@link OldDoFn} instance.
  *
  * <p>Prior to Java 8, any local variables of the enclosing
  * method referenced from within an anonymous inner class need to be
- * marked as {@code final}. If defining the {@link DoFn} as a named
+ * marked as {@code final}. If defining the {@link OldDoFn} as a named
  * static class, such variables would be passed as explicit
  * constructor arguments and stored in explicit instance variables.
  *
  * <p>There are three main ways to initialize the state of a
- * {@link DoFn} instance processing a bundle:
+ * {@link OldDoFn} instance processing a bundle:
  *
  * <ul>
  *
  * <li>Define instance variable state (including implicit instance
  * variables holding final variables captured by an anonymous inner
- * class), initialized by the {@link DoFn}'s constructor (which is
+ * class), initialized by the {@link OldDoFn}'s constructor (which is
  * implicit for an anonymous inner class). This state will be
- * automatically serialized and then deserialized in the {@code DoFn}
+ * automatically serialized and then deserialized in the {@code OldDoFn}
  * instances created for bundles. This method is good for state
- * known when the original {@code DoFn} is created in the main
+ * known when the original {@code OldDoFn} is created in the main
  * program, if it's not overly large. This is not suitable for any
- * state which must only be used for a single bundle, as {@link DoFn DoFn's}
+ * state which must only be used for a single bundle, as {@link OldDoFn OldDoFn's}
  * may be used to process multiple bundles.
  *
  * <li>Compute the state as a singleton {@link PCollection} and pass it
- * in as a side input to the {@link DoFn}. This is good if the state
+ * in as a side input to the {@link OldDoFn}. This is good if the state
  * needs to be computed by the pipeline, or if the state is very large
  * and so is best read from file(s) rather than sent as part of the
- * {@code DoFn}'s serialized state.
+ * {@code OldDoFn}'s serialized state.
  *
- * <li>Initialize the state in each {@link DoFn} instance, in
- * {@link DoFn#startBundle}. This is good if the initialization
+ * <li>Initialize the state in each {@link OldDoFn} instance, in
+ * {@link OldDoFn#startBundle}. This is good if the initialization
  * doesn't depend on any information known only by the main program or
  * computed by earlier pipeline operations, but is the same for all
- * instances of this {@link DoFn} for all program executions, say
+ * instances of this {@link OldDoFn} for all program executions, say
  * setting up empty caches or initializing constant data.
  *
  * </ul>
@@ -362,13 +363,13 @@ import java.util.List;
  * no support in the Google Cloud Dataflow system for communicating
  * and synchronizing updates to shared state across worker machines,
  * so programs should not access any mutable static variable state in
- * their {@link DoFn}, without understanding that the Java processes
+ * their {@link OldDoFn}, without understanding that the Java processes
  * for the main program and workers will each have its own independent
  * copy of such state, and there won't be any automatic copying of
  * that state across Java processes. All information should be
- * communicated to {@link DoFn} instances via main and side inputs and
+ * communicated to {@link OldDoFn} instances via main and side inputs and
  * serialized state, and all output should be communicated from a
- * {@link DoFn} instance via main and side outputs, in the absence of
+ * {@link OldDoFn} instance via main and side outputs, in the absence of
  * external communication mechanisms written by user code.
  *
  * <h2>Fault Tolerance</h2>
@@ -378,23 +379,23 @@ import java.util.List;
  * While individual failures are rare, the larger the job, the greater
  * the chance that something, somewhere, will fail. The Google Cloud
  * Dataflow service strives to mask such failures automatically,
- * principally by retrying failed {@link DoFn} bundle. This means
- * that a {@code DoFn} instance might process a bundle partially, then
+ * principally by retrying failed {@link OldDoFn} bundles. This means
+ * that an {@code OldDoFn} instance might process a bundle partially, then
  * crash for some reason, then be rerun (often on a different worker
  * machine) on that same bundle and on the same elements as before.
- * Sometimes two or more {@link DoFn} instances will be running on the
+ * Sometimes two or more {@link OldDoFn} instances will be running on the
  * same bundle simultaneously, with the system taking the results of
  * the first instance to complete successfully. Consequently, the
- * code in a {@link DoFn} needs to be written such that these
+ * code in an {@link OldDoFn} needs to be written such that these
  * duplicate (sequential or concurrent) executions do not cause
- * problems. If the outputs of a {@link DoFn} are a pure function of
+ * problems. If the outputs of an {@link OldDoFn} are a pure function of
  * its inputs, then this requirement is satisfied. However, if a
- * {@link DoFn DoFn's} execution has external side-effects, such as performing
- * updates to external HTTP services, then the {@link DoFn DoFn's} code
+ * {@link OldDoFn OldDoFn's} execution has external side-effects, such as performing
+ * updates to external HTTP services, then the {@link OldDoFn OldDoFn's} code
  * needs to take care to ensure that those updates are idempotent and
  * that concurrent updates are acceptable. This property can be
  * difficult to achieve, so it is advisable to strive to keep
- * {@link DoFn DoFns} as pure functions as much as possible.
+ * {@link OldDoFn DoFns} as pure functions as much as possible.
  *
  * <h2>Optimization</h2>
  *
@@ -439,15 +440,15 @@ public class ParDo {
    *
    * <p>Side inputs are {@link PCollectionView PCollectionViews}, whose contents are
    * computed during pipeline execution and then made accessible to
-   * {@link DoFn} code via {@link DoFn.ProcessContext#sideInput sideInput}. Each
-   * invocation of the {@link DoFn} receives the same values for these
+   * {@link OldDoFn} code via {@link OldDoFn.ProcessContext#sideInput sideInput}. Each
+   * invocation of the {@link OldDoFn} receives the same values for these
    * side inputs.
    *
    * <p>See the discussion of Side Inputs above for more explanation.
    *
    * <p>The resulting {@link PTransform} is incomplete, and its
    * input/output types are not yet bound. Use
-   * {@link ParDo.Unbound#of} to specify the {@link DoFn} to
+   * {@link ParDo.Unbound#of} to specify the {@link OldDoFn} to
    * invoke, which will also bind the input/output types of this
    * {@link PTransform}.
    */
@@ -460,13 +461,13 @@ public class ParDo {
     *
    * <p>Side inputs are {@link PCollectionView}s, whose contents are
    * computed during pipeline execution and then made accessible to
-   * {@code DoFn} code via {@link DoFn.ProcessContext#sideInput sideInput}.
+   * {@code OldDoFn} code via {@link OldDoFn.ProcessContext#sideInput sideInput}.
    *
    * <p>See the discussion of Side Inputs above for more explanation.
    *
    * <p>The resulting {@link PTransform} is incomplete, and its
    * input/output types are not yet bound. Use
-   * {@link ParDo.Unbound#of} to specify the {@link DoFn} to
+   * {@link ParDo.Unbound#of} to specify the {@link OldDoFn} to
    * invoke, which will also bind the input/output types of this
    * {@link PTransform}.
    */
@@ -482,11 +483,11 @@ public class ParDo {
    *
    * <p>{@link TupleTag TupleTags} are used to name (with its static element
    * type {@code T}) each main and side output {@code PCollection<T>}.
-   * This {@link PTransform PTransform's} {@link DoFn} emits elements to the main
+   * This {@link PTransform PTransform's} {@link OldDoFn} emits elements to the main
    * output {@link PCollection} as normal, using
-   * {@link DoFn.Context#output}. The {@link DoFn} emits elements to
+   * {@link OldDoFn.Context#output}. The {@link OldDoFn} emits elements to
    * a side output {@code PCollection} using
-   * {@link DoFn.Context#sideOutput}, passing that side output's tag
+   * {@link OldDoFn.Context#sideOutput}, passing that side output's tag
    * as an argument. The result of invoking this {@link PTransform}
    * will be a {@link PCollectionTuple}, and any of the the main and
    * side output {@code PCollection}s can be retrieved from it via
@@ -497,7 +498,7 @@ public class ParDo {
    *
    * <p>The resulting {@link PTransform} is incomplete, and its input
    * type is not yet bound. Use {@link ParDo.UnboundMulti#of}
-   * to specify the {@link DoFn} to invoke, which will also bind the
+   * to specify the {@link OldDoFn} to invoke, which will also bind the
    * input type of this {@link PTransform}.
    */
   public static <OutputT> UnboundMulti<OutputT> withOutputTags(
@@ -508,24 +509,24 @@ public class ParDo {
 
   /**
    * Creates a {@link ParDo} {@link PTransform} that will invoke the
-   * given {@link DoFn} function.
+   * given {@link OldDoFn} function.
    *
    * <p>The resulting {@link PTransform PTransform's} types have been bound, with the
    * input being a {@code PCollection<InputT>} and the output a
    * {@code PCollection<OutputT>}, inferred from the types of the argument
-   * {@code DoFn<InputT, OutputT>}. It is ready to be applied, or further
+   * {@code OldDoFn<InputT, OutputT>}. It is ready to be applied, or further
    * properties can be set on it first.
    */
-  public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+  public static <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
     return of(fn, fn.getClass());
   }
 
   private static <InputT, OutputT> Bound<InputT, OutputT> of(
-          DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+          OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
     return new Unbound().of(fn, fnClass);
   }
 
-  private static <InputT, OutputT> DoFn<InputT, OutputT>
+  private static <InputT, OutputT> OldDoFn<InputT, OutputT>
       adapt(DoFnWithContext<InputT, OutputT> fn) {
     return DoFnReflector.of(fn.getClass()).toDoFn(fn);
   }
@@ -537,11 +538,11 @@ public class ParDo {
    * <p>The resulting {@link PTransform PTransform's} types have been bound, with the
    * input being a {@code PCollection<InputT>} and the output a
    * {@code PCollection<OutputT>}, inferred from the types of the argument
-   * {@code DoFn<InputT, OutputT>}. It is ready to be applied, or further
+   * {@code OldDoFn<InputT, OutputT>}. It is ready to be applied, or further
    * properties can be set on it first.
    *
    * <p>{@link DoFnWithContext} is an experimental alternative to
-   * {@link DoFn} which simplifies accessing the window of the element.
+   * {@link OldDoFn} which simplifies accessing the window of the element.
    */
   @Experimental
   public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFnWithContext<InputT, OutputT> fn) {
@@ -552,7 +553,7 @@ public class ParDo {
    * An incomplete {@link ParDo} transform, with unbound input/output types.
    *
    * <p>Before being applied, {@link ParDo.Unbound#of} must be
-   * invoked to specify the {@link DoFn} to invoke, which will also
+   * invoked to specify the {@link OldDoFn} to invoke, which will also
    * bind the input/output types of this {@link PTransform}.
    */
   public static class Unbound {
@@ -614,18 +615,18 @@ public class ParDo {
 
     /**
      * Returns a new {@link ParDo} {@link PTransform} that's like this
-     * transform but that will invoke the given {@link DoFn}
+     * transform but that will invoke the given {@link OldDoFn}
      * function, and that has its input and output types bound. Does
      * not modify this transform. The resulting {@link PTransform} is
      * sufficiently specified to be applied, but more properties can
      * still be specified.
      */
-    public <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+    public <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
       return of(fn, fn.getClass());
     }
 
     private <InputT, OutputT> Bound<InputT, OutputT> of(
-        DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+        OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
       return new Bound<>(name, sideInputs, fn, fnClass);
     }
 
@@ -645,7 +646,7 @@ public class ParDo {
 
   /**
    * A {@link PTransform} that, when applied to a {@code PCollection<InputT>},
-   * invokes a user-specified {@code DoFn<InputT, OutputT>} on all its elements,
+   * invokes a user-specified {@code OldDoFn<InputT, OutputT>} on all its elements,
    * with all its outputs collected into an output
    * {@code PCollection<OutputT>}.
    *
@@ -659,12 +660,12 @@ public class ParDo {
       extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
     // Inherits name.
     private final List<PCollectionView<?>> sideInputs;
-    private final DoFn<InputT, OutputT> fn;
+    private final OldDoFn<InputT, OutputT> fn;
     private final Class<?> fnClass;
 
     Bound(String name,
           List<PCollectionView<?>> sideInputs,
-          DoFn<InputT, OutputT> fn,
+          OldDoFn<InputT, OutputT> fn,
           Class<?> fnClass) {
       super(name);
       this.sideInputs = sideInputs;
@@ -746,9 +747,9 @@ public class ParDo {
     /**
      * {@inheritDoc}
      *
-     * <p>{@link ParDo} registers its internal {@link DoFn} as a subcomponent for display data.
-     * {@link DoFn} implementations can register display data by overriding
-     * {@link DoFn#populateDisplayData}.
+     * <p>{@link ParDo} registers its internal {@link OldDoFn} as a subcomponent for display data.
+     * {@link OldDoFn} implementations can register display data by overriding
+     * {@link OldDoFn#populateDisplayData}.
      */
     @Override
     public void populateDisplayData(Builder builder) {
@@ -756,7 +757,7 @@ public class ParDo {
       ParDo.populateDisplayData(builder, fn, fnClass);
     }
 
-    public DoFn<InputT, OutputT> getFn() {
+    public OldDoFn<InputT, OutputT> getFn() {
       return fn;
     }
 
@@ -770,7 +771,7 @@ public class ParDo {
    * input type.
    *
    * <p>Before being applied, {@link ParDo.UnboundMulti#of} must be
-   * invoked to specify the {@link DoFn} to invoke, which will also
+   * invoked to specify the {@link OldDoFn} to invoke, which will also
    * bind the input type of this {@link PTransform}.
    *
    * @param <OutputT> the type of the main output {@code PCollection} elements
@@ -827,16 +828,16 @@ public class ParDo {
     /**
      * Returns a new multi-output {@link ParDo} {@link PTransform}
      * that's like this transform but that will invoke the given
-     * {@link DoFn} function, and that has its input type bound.
+     * {@link OldDoFn} function, and that has its input type bound.
      * Does not modify this transform. The resulting
      * {@link PTransform} is sufficiently specified to be applied, but
      * more properties can still be specified.
      */
-    public <InputT> BoundMulti<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+    public <InputT> BoundMulti<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
       return of(fn, fn.getClass());
     }
 
-    public <InputT> BoundMulti<InputT, OutputT> of(DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+    public <InputT> BoundMulti<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
       return new BoundMulti<>(
               name, sideInputs, mainOutputTag, sideOutputTags, fn, fnClass);
     }
@@ -857,7 +858,7 @@ public class ParDo {
   /**
    * A {@link PTransform} that, when applied to a
    * {@code PCollection<InputT>}, invokes a user-specified
-   * {@code DoFn<InputT, OutputT>} on all its elements, which can emit elements
+   * {@code OldDoFn<InputT, OutputT>} on all its elements, which can emit elements
    * to any of the {@link PTransform}'s main and side output
    * {@code PCollection}s, which are bundled into a result
    * {@code PCollectionTuple}.
@@ -871,14 +872,14 @@ public class ParDo {
     private final List<PCollectionView<?>> sideInputs;
     private final TupleTag<OutputT> mainOutputTag;
     private final TupleTagList sideOutputTags;
-    private final DoFn<InputT, OutputT> fn;
+    private final OldDoFn<InputT, OutputT> fn;
     private final Class<?> fnClass;
 
     BoundMulti(String name,
                List<PCollectionView<?>> sideInputs,
                TupleTag<OutputT> mainOutputTag,
                TupleTagList sideOutputTags,
-               DoFn<InputT, OutputT> fn,
+               OldDoFn<InputT, OutputT> fn,
                Class<?> fnClass) {
       super(name);
       this.sideInputs = sideInputs;
@@ -929,7 +930,7 @@ public class ParDo {
           input.isBounded());
 
       // The fn will likely be an instance of an anonymous subclass
-      // such as DoFn<Integer, String> { }, thus will have a high-fidelity
+      // such as OldDoFn<Integer, String> { }, thus will have a high-fidelity
       // TypeDescriptor for the output type.
       outputs.get(mainOutputTag).setTypeDescriptorInternal(fn.getOutputTypeDescriptor());
 
@@ -970,7 +971,7 @@ public class ParDo {
       ParDo.populateDisplayData(builder, fn, fnClass);
     }
 
-    public DoFn<InputT, OutputT> getFn() {
+    public OldDoFn<InputT, OutputT> getFn() {
       return fn;
     }
 
@@ -988,7 +989,7 @@ public class ParDo {
   }
 
   private static void populateDisplayData(
-      DisplayData.Builder builder, DoFn<?, ?> fn, Class<?> fnClass) {
+      DisplayData.Builder builder, OldDoFn<?, ?> fn, Class<?> fnClass) {
     builder
         .include(fn)
         .add(DisplayData.item("fn", fnClass)

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
index 6281b30..2ddcc29 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
@@ -134,7 +134,7 @@ public class Partition<T> extends PTransform<PCollection<T>, PCollectionList<T>>
     this.partitionDoFn = partitionDoFn;
   }
 
-  private static class PartitionDoFn<X> extends DoFn<X, Void> {
+  private static class PartitionDoFn<X> extends OldDoFn<X, Void> {
     private final int numPartitions;
     private final PartitionFn<? super X> partitionFn;
     private final TupleTagList outputTags;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
index b82744d..d82c457 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
@@ -85,7 +85,7 @@ public class RemoveDuplicates<T> extends PTransform<PCollection<T>,
   @Override
   public PCollection<T> apply(PCollection<T> in) {
     return in
-        .apply("CreateIndex", ParDo.of(new DoFn<T, KV<T, Void>>() {
+        .apply("CreateIndex", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(KV.of(c.element(), (Void) null));

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
index 4fcd17e..724b252 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
@@ -164,9 +164,9 @@ public class Sample {
   }
 
   /**
-   * A {@link DoFn} that returns up to limit elements from the side input PCollection.
+   * An {@link OldDoFn} that returns up to limit elements from the side input PCollection.
    */
-  private static class SampleAnyDoFn<T> extends DoFn<Void, T> {
+  private static class SampleAnyDoFn<T> extends OldDoFn<Void, T> {
     long limit;
     final PCollectionView<Iterable<T>> iterableView;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
index a879925..6623c6a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
@@ -29,7 +29,7 @@ public abstract class SimpleFunction<InputT, OutputT>
 
   /**
    * Returns a {@link TypeDescriptor} capturing what is known statically
-   * about the input type of this {@code DoFn} instance's most-derived
+   * about the input type of this {@code OldDoFn} instance's most-derived
    * class.
    *
    * <p>See {@link #getOutputTypeDescriptor} for more discussion.
@@ -40,10 +40,10 @@ public abstract class SimpleFunction<InputT, OutputT>
 
   /**
    * Returns a {@link TypeDescriptor} capturing what is known statically
-   * about the output type of this {@code DoFn} instance's
+   * about the output type of this {@code OldDoFn} instance's
    * most-derived class.
    *
-   * <p>In the normal case of a concrete {@code DoFn} subclass with
+   * <p>In the normal case of a concrete {@code OldDoFn} subclass with
    * no generic type parameters of its own (including anonymous inner
    * classes), this will be a complete non-generic type, which is good
    * for choosing a default output {@code Coder<OutputT>} for the output

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
index 5212261..856e32a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
@@ -58,7 +58,7 @@ public class Values<V> extends PTransform<PCollection<? extends KV<?, V>>,
   @Override
   public PCollection<V> apply(PCollection<? extends KV<?, V>> in) {
     return
-        in.apply("Values", ParDo.of(new DoFn<KV<?, V>, V>() {
+        in.apply("Values", ParDo.of(new OldDoFn<KV<?, V>, V>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(c.element().getValue());

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
index 7a97c13..8a61637 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
@@ -38,7 +38,7 @@ import java.util.Map;
  *
  * <p>When a {@link ParDo} tranform is processing a main input
  * element in a window {@code w} and a {@link PCollectionView} is read via
- * {@link DoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
+ * {@link OldDoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
  * returned.
  *
  * <p>The SDK supports viewing a {@link PCollection}, per window, as a single value,
@@ -118,7 +118,7 @@ import java.util.Map;
  *
  * PCollection PageVisits = urlVisits
  *     .apply(ParDo.withSideInputs(urlToPage)
- *         .of(new DoFn<UrlVisit, PageVisit>() {
+ *         .of(new OldDoFn<UrlVisit, PageVisit>() {
  *             {@literal @}Override
  *             void processElement(ProcessContext context) {
  *               UrlVisit urlVisit = context.element();
@@ -154,11 +154,11 @@ public class View {
    *
    * <p>If the input {@link PCollection} is empty,
    * throws {@link java.util.NoSuchElementException} in the consuming
-   * {@link DoFn}.
+   * {@link OldDoFn}.
    *
    * <p>If the input {@link PCollection} contains more than one
    * element, throws {@link IllegalArgumentException} in the
-   * consuming {@link DoFn}.
+   * consuming {@link OldDoFn}.
    */
   public static <T> AsSingleton<T> asSingleton() {
     return new AsSingleton<>();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
index 25116d8..37d45aa 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
@@ -113,7 +113,7 @@ public class WithKeys<K, V> extends PTransform<PCollection<V>,
   @Override
   public PCollection<KV<K, V>> apply(PCollection<V> in) {
     PCollection<KV<K, V>> result =
-        in.apply("AddKeys", ParDo.of(new DoFn<V, KV<K, V>>() {
+        in.apply("AddKeys", ParDo.of(new OldDoFn<V, KV<K, V>>() {
           @Override
           public void processElement(ProcessContext c) {
             c.output(KV.of(fn.apply(c.element()),

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
index ef4b269..41b549b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
@@ -92,7 +92,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
    * Returns the allowed timestamp skew duration, which is the maximum
    * duration that timestamps can be shifted backwards from the timestamp of the input element.
    *
-   * @see DoFn#getAllowedTimestampSkew()
+   * @see OldDoFn#getAllowedTimestampSkew()
    */
   public Duration getAllowedTimestampSkew() {
     return allowedTimestampSkew;
@@ -105,7 +105,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
         .setTypeDescriptorInternal(input.getTypeDescriptor());
   }
 
-  private static class AddTimestampsDoFn<T> extends DoFn<T, T> {
+  private static class AddTimestampsDoFn<T> extends OldDoFn<T, T> {
     private final SerializableFunction<T, Instant> fn;
     private final Duration allowedTimestampSkew;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
index ee7323b..5dcaec8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
@@ -30,7 +30,6 @@ import com.google.common.collect.Sets;
 import com.fasterxml.jackson.annotation.JsonGetter;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonValue;
-
 import org.apache.avro.reflect.Nullable;
 import org.joda.time.Duration;
 import org.joda.time.Instant;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
index 5e4cb52..aa26cbb 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
@@ -37,7 +37,6 @@ import com.google.common.collect.PeekingIterator;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
index ba4a4a7..1bd9f4a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
@@ -19,9 +19,9 @@ package org.apache.beam.sdk.transforms.join;
 
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.join.CoGbkResult.CoGbkResultCoder;
@@ -57,7 +57,7 @@ import java.util.List;
  *
  * PCollection<T> finalResultCollection =
  *   coGbkResultCollection.apply(ParDo.of(
- *     new DoFn<KV<K, CoGbkResult>, T>() {
+ *     new OldDoFn<KV<K, CoGbkResult>, T>() {
  *       @Override
  *       public void processElement(ProcessContext c) {
  *         KV<K, CoGbkResult> e = c.element();
@@ -167,12 +167,12 @@ public class CoGroupByKey<K> extends
   }
 
   /**
-   * A DoFn to construct a UnionTable (i.e., a
+   * An OldDoFn to construct a UnionTable (i.e., a
    * {@code PCollection<KV<K, RawUnionValue>>} from a
    * {@code PCollection<KV<K, V>>}.
    */
   private static class ConstructUnionTableFn<K, V> extends
-      DoFn<KV<K, V>, KV<K, RawUnionValue>> {
+      OldDoFn<KV<K, V>, KV<K, RawUnionValue>> {
 
     private final int index;
 
@@ -188,12 +188,12 @@ public class CoGroupByKey<K> extends
   }
 
   /**
-   * A DoFn to construct a CoGbkResult from an input grouped union
+   * An OldDoFn to construct a CoGbkResult from an input grouped union
    * table.
     */
   private static class ConstructCoGbkResultFn<K>
-    extends DoFn<KV<K, Iterable<RawUnionValue>>,
-                 KV<K, CoGbkResult>> {
+    extends OldDoFn<KV<K, Iterable<RawUnionValue>>,
+                     KV<K, CoGbkResult>> {
 
     private final CoGbkResultSchema schema;
 

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
index bd57339..dc1e74b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
@@ -23,6 +23,7 @@ import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.util.ExecutableTrigger;
 
 import com.google.common.base.Joiner;
+
 import org.joda.time.Instant;
 
 import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
index 563455b..324ab08 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
@@ -25,6 +25,7 @@ import org.joda.time.Instant;
 
 import java.util.List;
 import java.util.Objects;
+
 import javax.annotation.Nullable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
index 6f9c717..45898e0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.coders.DurationCoder;
 import org.apache.beam.sdk.coders.InstantCoder;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
-
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.joda.time.ReadableDuration;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
index 40f3496..7267d00 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms.windowing;
 
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger;
+
 import org.joda.time.Instant;
 
 import java.util.List;