You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by dh...@apache.org on 2016/08/04 01:26:09 UTC
[01/19] incubator-beam git commit: Port WordCount example from
OldDoFn to DoFn
Repository: incubator-beam
Updated Branches:
refs/heads/master 388816a80 -> 9a329aada
Port WordCount example from OldDoFn to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/64481d0c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/64481d0c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/64481d0c
Branch: refs/heads/master
Commit: 64481d0c2ed52a075ca1f0aa9946155aa9b13119
Parents: 3bcb6f4
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:28:28 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:52 2016 -0700
----------------------------------------------------------------------
.../src/main/java/org/apache/beam/examples/WordCount.java | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/64481d0c/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
index 274d1ad..d3768a8 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
@@ -26,8 +26,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -97,14 +97,14 @@ public class WordCount {
/**
* Concept #2: You can make your pipeline code less verbose by defining your DoFns statically out-
- * of-line. This OldDoFn tokenizes lines of text into individual words; we pass it to a ParDo in
- * the pipeline.
+ * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
+ * pipeline.
*/
- static class ExtractWordsFn extends OldDoFn<String, String> {
+ static class ExtractWordsFn extends DoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
- @Override
+ @ProcessElement
public void processElement(ProcessContext c) {
if (c.element().trim().isEmpty()) {
emptyLines.addValue(1L);
[02/19] incubator-beam git commit: Rename DoFnWithContext to DoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index 0a6eab0..7fe053c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -168,17 +168,17 @@ public class ParDoTest implements Serializable {
public void processElement(OldDoFn<Integer, String>.ProcessContext c) throws Exception {}
}
- static class TestDoFnWithContext extends DoFnWithContext<Integer, String> {
+ static class TestDoFn extends DoFn<Integer, String> {
enum State { UNSTARTED, STARTED, PROCESSING, FINISHED }
State state = State.UNSTARTED;
final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
final List<TupleTag<String>> sideOutputTupleTags = new ArrayList<>();
- public TestDoFnWithContext() {
+ public TestDoFn() {
}
- public TestDoFnWithContext(List<PCollectionView<Integer>> sideInputViews,
+ public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
List<TupleTag<String>> sideOutputTupleTags) {
this.sideInputViews.addAll(sideInputViews);
this.sideOutputTupleTags.addAll(sideOutputTupleTags);
@@ -362,7 +362,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs))
- .apply(ParDo.of(new TestDoFnWithContext()));
+ .apply(ParDo.of(new TestDoFn()));
PAssert.that(output)
.satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -1426,7 +1426,7 @@ public class ParDoTest implements Serializable {
@Test
public void testDoFnWithContextDisplayData() {
- DoFnWithContext<String, String> fn = new DoFnWithContext<String, String>() {
+ DoFn<String, String> fn = new DoFn<String, String>() {
@ProcessElement
public void proccessElement(ProcessContext c) {}
@@ -1445,7 +1445,7 @@ public class ParDoTest implements Serializable {
@Test
public void testWithOutputTagsDisplayData() {
- DoFnWithContext<String, String> fn = new DoFnWithContext<String, String>() {
+ DoFn<String, String> fn = new DoFn<String, String>() {
@ProcessElement
public void proccessElement(ProcessContext c) {}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/dofnreflector/DoFnReflectorTestHelper.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/dofnreflector/DoFnReflectorTestHelper.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/dofnreflector/DoFnReflectorTestHelper.java
index 5ff2bf1..90fba12 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/dofnreflector/DoFnReflectorTestHelper.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/dofnreflector/DoFnReflectorTestHelper.java
@@ -17,8 +17,8 @@
*/
package org.apache.beam.sdk.transforms.dofnreflector;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.DoFnReflectorTest.Invocations;
-import org.apache.beam.sdk.transforms.DoFnWithContext;
/**
* Test helper for DoFnReflectorTest, which needs to test package-private access
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.transforms.DoFnWithContext;
*/
public class DoFnReflectorTestHelper {
- private static class StaticPrivateDoFn extends DoFnWithContext<String, String> {
+ private static class StaticPrivateDoFn extends DoFn<String, String> {
final Invocations invocations;
public StaticPrivateDoFn(Invocations invocations) {
@@ -39,7 +39,7 @@ public class DoFnReflectorTestHelper {
}
}
- private class InnerPrivateDoFn extends DoFnWithContext<String, String> {
+ private class InnerPrivateDoFn extends DoFn<String, String> {
final Invocations invocations;
public InnerPrivateDoFn(Invocations invocations) {
@@ -52,7 +52,7 @@ public class DoFnReflectorTestHelper {
}
}
- static class StaticPackagePrivateDoFn extends DoFnWithContext<String, String> {
+ static class StaticPackagePrivateDoFn extends DoFn<String, String> {
final Invocations invocations;
public StaticPackagePrivateDoFn(Invocations invocations) {
@@ -65,7 +65,7 @@ public class DoFnReflectorTestHelper {
}
}
- class InnerPackagePrivateDoFn extends DoFnWithContext<String, String> {
+ class InnerPackagePrivateDoFn extends DoFn<String, String> {
final Invocations invocations;
public InnerPackagePrivateDoFn(Invocations invocations) {
@@ -78,25 +78,25 @@ public class DoFnReflectorTestHelper {
}
}
- public static DoFnWithContext<String, String> newStaticPackagePrivateDoFn(
+ public static DoFn<String, String> newStaticPackagePrivateDoFn(
Invocations invocations) {
return new StaticPackagePrivateDoFn(invocations);
}
- public DoFnWithContext<String, String> newInnerPackagePrivateDoFn(Invocations invocations) {
+ public DoFn<String, String> newInnerPackagePrivateDoFn(Invocations invocations) {
return new InnerPackagePrivateDoFn(invocations);
}
- public static DoFnWithContext<String, String> newStaticPrivateDoFn(Invocations invocations) {
+ public static DoFn<String, String> newStaticPrivateDoFn(Invocations invocations) {
return new StaticPrivateDoFn(invocations);
}
- public DoFnWithContext<String, String> newInnerPrivateDoFn(Invocations invocations) {
+ public DoFn<String, String> newInnerPrivateDoFn(Invocations invocations) {
return new InnerPrivateDoFn(invocations);
}
- public DoFnWithContext<String, String> newInnerAnonymousDoFn(final Invocations invocations) {
- return new DoFnWithContext<String, String>() {
+ public DoFn<String, String> newInnerAnonymousDoFn(final Invocations invocations) {
+ return new DoFn<String, String>() {
@ProcessElement
public void process(ProcessContext c) {
invocations.wasProcessElementInvoked = true;
@@ -104,9 +104,9 @@ public class DoFnReflectorTestHelper {
};
}
- public static DoFnWithContext<String, String> newStaticAnonymousDoFn(
+ public static DoFn<String, String> newStaticAnonymousDoFn(
final Invocations invocations) {
- return new DoFnWithContext<String, String>() {
+ return new DoFn<String, String>() {
@ProcessElement
public void process(ProcessContext c) {
invocations.wasProcessElementInvoked = true;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
----------------------------------------------------------------------
diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
index 0da75f4..fd75e95 100644
--- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
+++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
@@ -20,10 +20,10 @@ package org.apache.beam.sdk.microbenchmarks.transforms;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.DoFn.ExtraContextFactory;
import org.apache.beam.sdk.transforms.DoFnReflector;
import org.apache.beam.sdk.transforms.DoFnReflector.DoFnInvoker;
-import org.apache.beam.sdk.transforms.DoFnWithContext;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ExtraContextFactory;
import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -40,7 +40,7 @@ import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
/**
- * Benchmarks for {@link OldDoFn} and {@link DoFnWithContext} invocations, specifically
+ * Benchmarks for {@link OldDoFn} and {@link DoFn} invocations, specifically
* for measuring the overhead of {@link DoFnReflector}.
*/
@State(Scope.Benchmark)
@@ -50,12 +50,12 @@ public class DoFnReflectorBenchmark {
private static final String ELEMENT = "some string to use for testing";
- private OldDoFn<String, String> doFn = new UpperCaseDoFn();
- private DoFnWithContext<String, String> doFnWithContext = new UpperCaseDoFnWithContext();
+ private OldDoFn<String, String> oldDoFn = new UpperCaseOldDoFn();
+ private DoFn<String, String> doFn = new UpperCaseDoFn();
- private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(doFn, ELEMENT);
+ private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(oldDoFn, ELEMENT);
private StubDoFnWithContextProcessContext stubDoFnWithContextContext =
- new StubDoFnWithContextProcessContext(doFnWithContext, ELEMENT);
+ new StubDoFnWithContextProcessContext(doFn, ELEMENT);
private ExtraContextFactory<String, String> extraContextFactory =
new ExtraContextFactory<String, String>() {
@@ -77,14 +77,14 @@ public class DoFnReflectorBenchmark {
@Setup
public void setUp() {
- doFnReflector = DoFnReflector.of(doFnWithContext.getClass());
- adaptedDoFnWithContext = doFnReflector.toDoFn(doFnWithContext);
- invoker = doFnReflector.bindInvoker(doFnWithContext);
+ doFnReflector = DoFnReflector.of(doFn.getClass());
+ adaptedDoFnWithContext = doFnReflector.toDoFn(doFn);
+ invoker = doFnReflector.bindInvoker(doFn);
}
@Benchmark
public String invokeDoFn() throws Exception {
- doFn.processElement(stubDoFnContext);
+ oldDoFn.processElement(stubDoFnContext);
return stubDoFnContext.output;
}
@@ -100,7 +100,7 @@ public class DoFnReflectorBenchmark {
return stubDoFnWithContextContext.output;
}
- private static class UpperCaseDoFn extends OldDoFn<String, String> {
+ private static class UpperCaseOldDoFn extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) throws Exception {
@@ -108,7 +108,7 @@ public class DoFnReflectorBenchmark {
}
}
- private static class UpperCaseDoFnWithContext extends DoFnWithContext<String, String> {
+ private static class UpperCaseDoFn extends DoFn<String, String> {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
@@ -187,11 +187,11 @@ public class DoFnReflectorBenchmark {
}
private static class StubDoFnWithContextProcessContext
- extends DoFnWithContext<String, String>.ProcessContext {
+ extends DoFn<String, String>.ProcessContext {
private final String element;
private String output;
- public StubDoFnWithContextProcessContext(DoFnWithContext<String, String> fn, String element) {
+ public StubDoFnWithContextProcessContext(DoFn<String, String> fn, String element) {
fn.super();
this.element = element;
}
[12/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
Rename DoFn to OldDoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/a64baf48
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/a64baf48
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/a64baf48
Branch: refs/heads/master
Commit: a64baf4878f28e98da696dacc587c1151d0cdb9e
Parents: 388816a
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 13:00:10 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:52 2016 -0700
----------------------------------------------------------------------
.../beam/examples/DebuggingWordCount.java | 6 +-
.../apache/beam/examples/MinimalWordCount.java | 7 +-
.../apache/beam/examples/WindowedWordCount.java | 10 +-
.../org/apache/beam/examples/WordCount.java | 8 +-
.../examples/common/PubsubFileInjector.java | 6 +-
.../beam/examples/complete/AutoComplete.java | 16 +-
.../examples/complete/StreamingWordExtract.java | 12 +-
.../apache/beam/examples/complete/TfIdf.java | 16 +-
.../examples/complete/TopWikipediaSessions.java | 12 +-
.../examples/complete/TrafficMaxLaneFlow.java | 10 +-
.../beam/examples/complete/TrafficRoutes.java | 12 +-
.../examples/cookbook/BigQueryTornadoes.java | 6 +-
.../cookbook/CombinePerKeyExamples.java | 6 +-
.../examples/cookbook/DatastoreWordCount.java | 11 +-
.../beam/examples/cookbook/FilterExamples.java | 12 +-
.../beam/examples/cookbook/JoinExamples.java | 10 +-
.../examples/cookbook/MaxPerKeyExamples.java | 6 +-
.../beam/examples/cookbook/TriggerExample.java | 12 +-
.../org/apache/beam/examples/WordCountTest.java | 2 +-
.../examples/complete/AutoCompleteTest.java | 4 +-
.../examples/cookbook/TriggerExampleTest.java | 4 +-
.../beam/examples/complete/game/GameStats.java | 10 +-
.../beam/examples/complete/game/UserScore.java | 4 +-
.../complete/game/utils/WriteToBigQuery.java | 12 +-
.../game/utils/WriteWindowedToBigQuery.java | 8 +-
.../examples/complete/game/UserScoreTest.java | 2 +-
.../core/GroupAlsoByWindowViaWindowSetDoFn.java | 12 +-
.../core/UnboundedReadFromBoundedSource.java | 2 +-
.../apache/beam/sdk/util/AssignWindowsDoFn.java | 10 +-
.../org/apache/beam/sdk/util/DoFnRunner.java | 21 +-
.../apache/beam/sdk/util/DoFnRunnerBase.java | 54 +-
.../org/apache/beam/sdk/util/DoFnRunners.java | 24 +-
.../beam/sdk/util/GroupAlsoByWindowsDoFn.java | 6 +-
.../GroupAlsoByWindowsViaOutputBufferDoFn.java | 4 +-
.../sdk/util/GroupByKeyViaGroupByKeyOnly.java | 6 +-
.../sdk/util/LateDataDroppingDoFnRunner.java | 4 +-
.../apache/beam/sdk/util/PaneInfoTracker.java | 1 -
.../apache/beam/sdk/util/ReduceFnRunner.java | 4 +-
.../apache/beam/sdk/util/SimpleDoFnRunner.java | 12 +-
.../org/apache/beam/sdk/util/WatermarkHold.java | 1 -
.../beam/sdk/util/ReduceFnRunnerTest.java | 1 +
.../apache/beam/sdk/util/ReduceFnTester.java | 1 +
.../beam/sdk/util/SimpleDoFnRunnerTest.java | 6 +-
.../GroupAlsoByWindowEvaluatorFactory.java | 4 +-
.../ImmutabilityCheckingBundleFactory.java | 4 +-
.../beam/runners/direct/ParDoEvaluator.java | 4 +-
.../direct/ParDoMultiEvaluatorFactory.java | 11 +-
.../direct/ParDoSingleEvaluatorFactory.java | 11 +-
.../direct/TransformEvaluatorFactory.java | 6 +-
.../direct/WriteWithShardingFactory.java | 4 +-
.../ConsumerTrackingPipelineVisitorTest.java | 22 +-
.../beam/runners/direct/DirectRunnerTest.java | 24 +-
.../ImmutabilityCheckingBundleFactoryTest.java | 6 +-
.../ImmutabilityEnforcementFactoryTest.java | 6 +-
.../direct/KeyedPValueTrackingVisitorTest.java | 6 +-
.../beam/runners/direct/ParDoEvaluatorTest.java | 6 +-
.../direct/ParDoMultiEvaluatorFactoryTest.java | 10 +-
.../direct/ParDoSingleEvaluatorFactoryTest.java | 10 +-
.../runners/direct/WatermarkManagerTest.java | 7 +-
.../beam/runners/flink/examples/TFIDF.java | 16 +-
.../beam/runners/flink/examples/WordCount.java | 4 +-
.../flink/examples/streaming/AutoComplete.java | 16 +-
.../flink/examples/streaming/JoinExamples.java | 8 +-
.../examples/streaming/KafkaIOExamples.java | 4 +-
.../KafkaWindowedWordCountExample.java | 6 +-
.../examples/streaming/WindowedWordCount.java | 6 +-
.../FlinkBatchTransformTranslators.java | 12 +-
.../FlinkStreamingTransformTranslators.java | 9 +-
.../functions/FlinkDoFnFunction.java | 10 +-
.../FlinkMergingNonShuffleReduceFunction.java | 8 +-
.../functions/FlinkMultiOutputDoFnFunction.java | 10 +-
.../FlinkMultiOutputProcessContext.java | 6 +-
.../functions/FlinkNoElementAssignContext.java | 8 +-
.../functions/FlinkPartialReduceFunction.java | 8 +-
.../functions/FlinkProcessContext.java | 16 +-
.../functions/FlinkReduceFunction.java | 8 +-
.../streaming/FlinkAbstractParDoWrapper.java | 18 +-
.../FlinkGroupAlsoByWindowWrapper.java | 10 +-
.../streaming/FlinkParDoBoundMultiWrapper.java | 4 +-
.../streaming/FlinkParDoBoundWrapper.java | 4 +-
.../state/AbstractFlinkTimerInternals.java | 4 +-
.../beam/runners/flink/PipelineOptionsTest.java | 6 +-
.../beam/runners/flink/ReadSourceITCase.java | 4 +-
.../flink/ReadSourceStreamingITCase.java | 4 +-
.../flink/streaming/GroupByNullKeyTest.java | 8 +-
.../streaming/TopWikipediaSessionsITCase.java | 6 +-
.../dataflow/DataflowPipelineTranslator.java | 6 +-
.../beam/runners/dataflow/DataflowRunner.java | 83 ++-
.../dataflow/internal/AssignWindows.java | 6 +-
.../beam/runners/dataflow/util/DoFnInfo.java | 16 +-
.../DataflowPipelineTranslatorTest.java | 10 +-
.../beam/runners/spark/examples/WordCount.java | 4 +-
.../runners/spark/translation/DoFnFunction.java | 8 +-
.../spark/translation/MultiDoFnFunction.java | 8 +-
.../spark/translation/SparkProcessContext.java | 18 +-
.../spark/translation/TransformTranslator.java | 7 +-
.../streaming/StreamingTransformTranslator.java | 4 +-
.../apache/beam/runners/spark/TfIdfTest.java | 12 +-
.../spark/translation/CombinePerKeyTest.java | 4 +-
.../spark/translation/DoFnOutputTest.java | 4 +-
.../translation/MultiOutputWordCountTest.java | 8 +-
.../spark/translation/SerializationTest.java | 10 +-
.../spark/translation/SideEffectsTest.java | 4 +-
.../streaming/KafkaStreamingTest.java | 4 +-
.../org/apache/beam/sdk/coders/AvroCoder.java | 1 -
.../apache/beam/sdk/coders/DurationCoder.java | 1 -
.../apache/beam/sdk/coders/InstantCoder.java | 1 -
.../java/org/apache/beam/sdk/io/PubsubIO.java | 6 +-
.../apache/beam/sdk/io/PubsubUnboundedSink.java | 8 +-
.../beam/sdk/io/PubsubUnboundedSource.java | 4 +-
.../java/org/apache/beam/sdk/io/Source.java | 2 +-
.../main/java/org/apache/beam/sdk/io/Write.java | 21 +-
.../org/apache/beam/sdk/options/GcpOptions.java | 1 -
.../beam/sdk/options/PipelineOptions.java | 8 +-
.../sdk/options/PipelineOptionsFactory.java | 1 -
.../sdk/options/PipelineOptionsReflector.java | 1 +
.../beam/sdk/runners/AggregatorValues.java | 4 +-
.../org/apache/beam/sdk/testing/PAssert.java | 24 +-
.../beam/sdk/testing/SerializableMatchers.java | 1 -
.../apache/beam/sdk/testing/TestPipeline.java | 1 -
.../beam/sdk/testing/TestPipelineOptions.java | 1 +
.../apache/beam/sdk/transforms/Aggregator.java | 14 +-
.../sdk/transforms/AggregatorRetriever.java | 6 +-
.../org/apache/beam/sdk/transforms/Combine.java | 14 +-
.../apache/beam/sdk/transforms/CombineFns.java | 4 +-
.../org/apache/beam/sdk/transforms/Count.java | 2 +-
.../org/apache/beam/sdk/transforms/Create.java | 2 +-
.../org/apache/beam/sdk/transforms/DoFn.java | 565 -------------------
.../beam/sdk/transforms/DoFnReflector.java | 38 +-
.../apache/beam/sdk/transforms/DoFnTester.java | 86 +--
.../beam/sdk/transforms/DoFnWithContext.java | 16 +-
.../org/apache/beam/sdk/transforms/Filter.java | 2 +-
.../beam/sdk/transforms/FlatMapElements.java | 2 +-
.../org/apache/beam/sdk/transforms/Flatten.java | 2 +-
.../apache/beam/sdk/transforms/GroupByKey.java | 2 +-
.../transforms/IntraBundleParallelization.java | 40 +-
.../org/apache/beam/sdk/transforms/Keys.java | 2 +-
.../org/apache/beam/sdk/transforms/KvSwap.java | 2 +-
.../apache/beam/sdk/transforms/MapElements.java | 2 +-
.../org/apache/beam/sdk/transforms/OldDoFn.java | 565 +++++++++++++++++++
.../apache/beam/sdk/transforms/PTransform.java | 2 +-
.../org/apache/beam/sdk/transforms/ParDo.java | 203 +++----
.../apache/beam/sdk/transforms/Partition.java | 2 +-
.../beam/sdk/transforms/RemoveDuplicates.java | 2 +-
.../org/apache/beam/sdk/transforms/Sample.java | 4 +-
.../beam/sdk/transforms/SimpleFunction.java | 6 +-
.../org/apache/beam/sdk/transforms/Values.java | 2 +-
.../org/apache/beam/sdk/transforms/View.java | 8 +-
.../apache/beam/sdk/transforms/WithKeys.java | 2 +-
.../beam/sdk/transforms/WithTimestamps.java | 4 +-
.../sdk/transforms/display/DisplayData.java | 1 -
.../beam/sdk/transforms/join/CoGbkResult.java | 1 -
.../beam/sdk/transforms/join/CoGroupByKey.java | 14 +-
.../sdk/transforms/windowing/AfterEach.java | 1 +
.../windowing/AfterProcessingTime.java | 1 +
.../transforms/windowing/IntervalWindow.java | 1 -
.../beam/sdk/transforms/windowing/Never.java | 1 +
.../beam/sdk/transforms/windowing/PaneInfo.java | 10 +-
.../beam/sdk/transforms/windowing/Window.java | 4 +-
.../beam/sdk/util/BaseExecutionContext.java | 4 +-
.../apache/beam/sdk/util/BucketingFunction.java | 1 +
.../beam/sdk/util/CombineContextFactory.java | 6 +-
.../apache/beam/sdk/util/ExecutionContext.java | 8 +-
.../apache/beam/sdk/util/MovingFunction.java | 1 +
.../beam/sdk/util/PerKeyCombineFnRunner.java | 44 +-
.../beam/sdk/util/PerKeyCombineFnRunners.java | 30 +-
.../org/apache/beam/sdk/util/PubsubClient.java | 1 +
.../apache/beam/sdk/util/PubsubTestClient.java | 1 +
.../sdk/util/ReifyTimestampAndWindowsDoFn.java | 6 +-
.../org/apache/beam/sdk/util/Reshuffle.java | 4 +-
.../apache/beam/sdk/util/SerializableUtils.java | 2 +-
.../org/apache/beam/sdk/util/StringUtils.java | 2 +-
.../beam/sdk/util/SystemDoFnInternal.java | 6 +-
.../apache/beam/sdk/util/TimerInternals.java | 1 -
.../apache/beam/sdk/util/ValueWithRecordId.java | 6 +-
.../org/apache/beam/sdk/util/WindowedValue.java | 1 -
.../beam/sdk/util/WindowingInternals.java | 4 +-
.../beam/sdk/util/common/ReflectHelpers.java | 1 +
.../beam/sdk/values/TimestampedValue.java | 1 -
.../java/org/apache/beam/sdk/PipelineTest.java | 6 +-
.../apache/beam/sdk/coders/AvroCoderTest.java | 4 +-
.../beam/sdk/coders/CoderRegistryTest.java | 6 +-
.../beam/sdk/coders/SerializableCoderTest.java | 6 +-
.../org/apache/beam/sdk/io/AvroSourceTest.java | 1 +
.../io/BoundedReadFromUnboundedSourceTest.java | 1 +
.../beam/sdk/io/CompressedSourceTest.java | 1 +
.../apache/beam/sdk/io/CountingInputTest.java | 5 +-
.../apache/beam/sdk/io/CountingSourceTest.java | 4 +-
.../beam/sdk/io/OffsetBasedSourceTest.java | 1 +
.../beam/sdk/io/PubsubUnboundedSinkTest.java | 4 +-
.../java/org/apache/beam/sdk/io/ReadTest.java | 1 +
.../java/org/apache/beam/sdk/io/TextIOTest.java | 1 +
.../java/org/apache/beam/sdk/io/WriteTest.java | 7 +-
.../org/apache/beam/sdk/io/XmlSinkTest.java | 1 +
.../apache/beam/sdk/options/GcpOptionsTest.java | 1 +
.../sdk/options/GoogleApiDebugOptionsTest.java | 1 -
.../sdk/options/PipelineOptionsFactoryTest.java | 1 -
.../beam/sdk/options/PipelineOptionsTest.java | 1 -
.../sdk/options/ProxyInvocationHandlerTest.java | 2 +-
.../AggregatorPipelineExtractorTest.java | 6 +-
.../apache/beam/sdk/testing/PAssertTest.java | 1 -
.../beam/sdk/testing/TestPipelineTest.java | 1 -
.../transforms/ApproximateQuantilesTest.java | 1 +
.../sdk/transforms/ApproximateUniqueTest.java | 5 +-
.../beam/sdk/transforms/CombineFnsTest.java | 2 +-
.../apache/beam/sdk/transforms/CombineTest.java | 12 +-
.../apache/beam/sdk/transforms/CreateTest.java | 2 +-
.../beam/sdk/transforms/DoFnContextTest.java | 69 ---
.../DoFnDelegatingAggregatorTest.java | 16 +-
.../beam/sdk/transforms/DoFnReflectorTest.java | 2 +-
.../apache/beam/sdk/transforms/DoFnTest.java | 242 --------
.../beam/sdk/transforms/DoFnTesterTest.java | 10 +-
.../sdk/transforms/DoFnWithContextTest.java | 6 +-
.../apache/beam/sdk/transforms/FlattenTest.java | 4 +-
.../beam/sdk/transforms/GroupByKeyTest.java | 6 +-
.../IntraBundleParallelizationTest.java | 23 +-
.../beam/sdk/transforms/MapElementsTest.java | 1 +
.../org/apache/beam/sdk/transforms/MaxTest.java | 1 +
.../org/apache/beam/sdk/transforms/MinTest.java | 2 +
.../apache/beam/sdk/transforms/NoOpDoFn.java | 20 +-
.../beam/sdk/transforms/OldDoFnContextTest.java | 69 +++
.../apache/beam/sdk/transforms/OldDoFnTest.java | 242 ++++++++
.../apache/beam/sdk/transforms/ParDoTest.java | 96 ++--
.../beam/sdk/transforms/PartitionTest.java | 1 +
.../apache/beam/sdk/transforms/SampleTest.java | 1 +
.../org/apache/beam/sdk/transforms/TopTest.java | 1 +
.../apache/beam/sdk/transforms/ViewTest.java | 398 ++++++-------
.../beam/sdk/transforms/WithTimestampsTest.java | 8 +-
.../display/DisplayDataEvaluatorTest.java | 6 +-
.../display/DisplayDataMatchersTest.java | 1 +
.../sdk/transforms/display/DisplayDataTest.java | 6 +-
.../sdk/transforms/join/CoGroupByKeyTest.java | 18 +-
.../sdk/transforms/windowing/NeverTest.java | 1 +
.../sdk/transforms/windowing/WindowTest.java | 6 +-
.../sdk/transforms/windowing/WindowingTest.java | 10 +-
.../beam/sdk/util/BucketingFunctionTest.java | 4 +-
.../beam/sdk/util/MovingFunctionTest.java | 4 +-
.../beam/sdk/util/SerializableUtilsTest.java | 1 -
.../apache/beam/sdk/util/SerializerTest.java | 1 -
.../apache/beam/sdk/util/StringUtilsTest.java | 16 +-
.../org/apache/beam/sdk/util/TriggerTester.java | 1 +
.../beam/sdk/util/common/CounterTest.java | 1 +
.../beam/sdk/values/PCollectionTupleTest.java | 4 +-
.../apache/beam/sdk/values/TypedPValueTest.java | 6 +-
.../beam/sdk/extensions/joinlibrary/Join.java | 8 +-
.../beam/sdk/io/gcp/bigquery/BigQueryIO.java | 18 +-
.../beam/sdk/io/gcp/bigtable/BigtableIO.java | 4 +-
.../beam/sdk/io/gcp/datastore/V1Beta3.java | 13 +-
.../sdk/io/gcp/bigquery/BigQueryIOTest.java | 6 +-
.../sdk/io/gcp/bigtable/BigtableWriteIT.java | 4 +-
.../sdk/io/gcp/datastore/V1Beta3TestUtil.java | 6 +-
.../java/org/apache/beam/sdk/io/jms/JmsIO.java | 4 +-
.../org/apache/beam/sdk/io/kafka/KafkaIO.java | 8 +-
.../apache/beam/sdk/io/kafka/KafkaIOTest.java | 7 +-
.../sdk/transforms/WithTimestampsJava8Test.java | 4 +-
.../src/main/java/DebuggingWordCount.java | 4 +-
.../src/main/java/MinimalWordCount.java | 6 +-
.../src/main/java/WindowedWordCount.java | 6 +-
.../src/main/java/WordCount.java | 6 +-
.../main/java/common/PubsubFileInjector.java | 4 +-
.../src/main/java/StarterPipeline.java | 6 +-
.../src/main/java/it/pkg/StarterPipeline.java | 6 +-
.../transforms/DoFnReflectorBenchmark.java | 14 +-
263 files changed, 2196 insertions(+), 2151 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
index 8d85d44..3c43152 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.values.KV;
@@ -106,8 +106,8 @@ import java.util.regex.Pattern;
* overridden with {@code --inputFile}.
*/
public class DebuggingWordCount {
- /** A DoFn that filters for a specific key based upon a regular expression. */
- public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
+ /** A OldDoFn that filters for a specific key based upon a regular expression. */
+ public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
/**
* Concept #1: The logger below uses the fully qualified class name of FilterTextFn
* as the logger. All log statements emitted by this logger will be referenced by this name
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
index 9f6d61a..ab0bb6d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.values.KV;
@@ -89,10 +89,11 @@ public class MinimalWordCount {
// the input text (a set of Shakespeare's texts).
p.apply(TextIO.Read.from("gs://dataflow-samples/shakespeare/*"))
// Concept #2: Apply a ParDo transform to our PCollection of text lines. This ParDo invokes a
- // DoFn (defined in-line) on each element that tokenizes the text line into individual words.
+ // OldDoFn (defined in-line) on each element that tokenizes the text line into individual
+ // words.
// The ParDo returns a PCollection<String>, where each element is an individual word in
// Shakespeare's collected texts.
- .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+ .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
for (String word : c.element().split("[^a-zA-Z']+")) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
index 7a4b29f..17f7da3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,14 +103,14 @@ public class WindowedWordCount {
static final int WINDOW_SIZE = 1; // Default window duration in minutes
/**
- * Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for
+ * Concept #2: An OldDoFn that sets the data element timestamp. This is a silly method, just for
* this example, for the bounded data case.
*
* <p>Imagine that many ghosts of Shakespeare are all typing madly at the same time to recreate
* his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
* 2-hour period.
*/
- static class AddTimestampFn extends DoFn<String, String> {
+ static class AddTimestampFn extends OldDoFn<String, String> {
private static final Duration RAND_RANGE = Duration.standardHours(2);
private final Instant minTimestamp;
@@ -130,8 +130,8 @@ public class WindowedWordCount {
}
}
- /** A DoFn that converts a Word and Count into a BigQuery table row. */
- static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+ /** An OldDoFn that converts a Word and Count into a BigQuery table row. */
+ static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
index af16c44..274d1ad 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java
@@ -26,8 +26,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -97,10 +97,10 @@ public class WordCount {
/**
* Concept #2: You can make your pipeline code less verbose by defining your DoFns statically out-
- * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
- * pipeline.
+ * of-line. This OldDoFn tokenizes lines of text into individual words; we pass it to a ParDo in
+ * the pipeline.
*/
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
index 15eda06..0a93521 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.PubsubOptions;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.IntraBundleParallelization;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.Transport;
import com.google.api.services.pubsub.Pubsub;
@@ -71,8 +71,8 @@ public class PubsubFileInjector {
}
}
- /** A DoFn that publishes non-empty lines to Google Cloud PubSub. */
- public static class Bound extends DoFn<String, Void> {
+ /** An OldDoFn that publishes non-empty lines to Google Cloud PubSub. */
+ public static class Bound extends OldDoFn<String, Void> {
private final String outputTopic;
private final String timestampLabelKey;
public transient Pubsub pubsub;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
index c6272e8..7b44af8 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
@@ -36,9 +36,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Partition;
@@ -130,7 +130,7 @@ public class AutoComplete {
// Map the KV outputs of Count into our own CompletionCandiate class.
.apply("CreateCompletionCandidates", ParDo.of(
- new DoFn<KV<String, Long>, CompletionCandidate>() {
+ new OldDoFn<KV<String, Long>, CompletionCandidate>() {
@Override
public void processElement(ProcessContext c) {
c.output(new CompletionCandidate(c.element().getKey(), c.element().getValue()));
@@ -209,7 +209,7 @@ public class AutoComplete {
}
private static class FlattenTops
- extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+ extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
@Override
public void processElement(ProcessContext c) {
for (CompletionCandidate cc : c.element().getValue()) {
@@ -260,10 +260,10 @@ public class AutoComplete {
}
/**
- * A DoFn that keys each candidate by all its prefixes.
+ * An OldDoFn that keys each candidate by all its prefixes.
*/
private static class AllPrefixes
- extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+ extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
private final int minPrefix;
private final int maxPrefix;
public AllPrefixes(int minPrefix) {
@@ -341,7 +341,7 @@ public class AutoComplete {
/**
* Takes as input a set of strings, and emits each #hashtag found therein.
*/
- static class ExtractHashtags extends DoFn<String, String> {
+ static class ExtractHashtags extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
Matcher m = Pattern.compile("#\\S+").matcher(c.element());
@@ -351,7 +351,7 @@ public class AutoComplete {
}
}
- static class FormatForBigquery extends DoFn<KV<String, List<CompletionCandidate>>, TableRow> {
+ static class FormatForBigquery extends OldDoFn<KV<String, List<CompletionCandidate>>, TableRow> {
@Override
public void processElement(ProcessContext c) {
List<TableRow> completions = new ArrayList<>();
@@ -385,7 +385,7 @@ public class AutoComplete {
* Takes as input the top candidates per prefix, and emits an entity
* suitable for writing to Datastore.
*/
- static class FormatForDatastore extends DoFn<KV<String, List<CompletionCandidate>>, Entity> {
+ static class FormatForDatastore extends OldDoFn<KV<String, List<CompletionCandidate>>, Entity> {
private String kind;
public FormatForDatastore(String kind) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
index db646a5..b0c9ffd 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.StreamingOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import com.google.api.services.bigquery.model.TableFieldSchema;
@@ -55,8 +55,8 @@ import java.util.ArrayList;
*/
public class StreamingWordExtract {
- /** A DoFn that tokenizes lines of text into individual words. */
- static class ExtractWords extends DoFn<String, String> {
+ /** An OldDoFn that tokenizes lines of text into individual words. */
+ static class ExtractWords extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
String[] words = c.element().split("[^a-zA-Z']+");
@@ -68,8 +68,8 @@ public class StreamingWordExtract {
}
}
- /** A DoFn that uppercases a word. */
- static class Uppercase extends DoFn<String, String> {
+ /** An OldDoFn that uppercases a word. */
+ static class Uppercase extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().toUpperCase());
@@ -79,7 +79,7 @@ public class StreamingWordExtract {
/**
* Converts strings into BigQuery rows.
*/
- static class StringToRowConverter extends DoFn<String, TableRow> {
+ static class StringToRowConverter extends OldDoFn<String, TableRow> {
/**
* In this example, put the whole string into single BigQuery field.
*/
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
index 8305314..470a689 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -225,7 +225,7 @@ public class TfIdf {
// of the words in the document associated with that URI.
PCollection<KV<URI, String>> uriToWords = uriToContent
.apply("SplitWords", ParDo.of(
- new DoFn<KV<URI, String>, KV<URI, String>>() {
+ new OldDoFn<KV<URI, String>, KV<URI, String>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey();
@@ -268,7 +268,7 @@ public class TfIdf {
// by the URI key.
PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
.apply("ShiftKeys", ParDo.of(
- new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+ new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey().getKey();
@@ -307,7 +307,7 @@ public class TfIdf {
// divided by the total number of words in the document.
PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
.apply("ComputeTermFrequencies", ParDo.of(
- new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey();
@@ -328,11 +328,11 @@ public class TfIdf {
// documents in which the word appears divided by the total
// number of documents in the corpus. Note how the total number of
// documents is passed as a side input; the same value is
- // presented to each invocation of the DoFn.
+ // presented to each invocation of the OldDoFn.
PCollection<KV<String, Double>> wordToDf = wordToDocCount
.apply("ComputeDocFrequencies", ParDo
.withSideInputs(totalDocuments)
- .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+ .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
@Override
public void processElement(ProcessContext c) {
String word = c.element().getKey();
@@ -361,7 +361,7 @@ public class TfIdf {
// divided by the log of the document frequency.
PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf = wordToUriAndTfAndDf
.apply("ComputeTfIdf", ParDo.of(
- new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
@Override
public void processElement(ProcessContext c) {
String word = c.element().getKey();
@@ -400,7 +400,7 @@ public class TfIdf {
@Override
public PDone apply(PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf) {
return wordToUriAndTfIdf
- .apply("Format", ParDo.of(new DoFn<KV<String, KV<URI, Double>>, String>() {
+ .apply("Format", ParDo.of(new OldDoFn<KV<String, KV<URI, Double>>, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(String.format("%s,\t%s,\t%f",
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
index f8af02a..0ed89d2 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java
@@ -26,8 +26,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableComparator;
@@ -85,7 +85,7 @@ public class TopWikipediaSessions {
/**
* Extracts user and timestamp from a TableRow representing a Wikipedia edit.
*/
- static class ExtractUserAndTimestamp extends DoFn<TableRow, String> {
+ static class ExtractUserAndTimestamp extends OldDoFn<TableRow, String> {
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
@@ -132,7 +132,7 @@ public class TopWikipediaSessions {
}
}
- static class SessionsToStringsDoFn extends DoFn<KV<String, Long>, KV<String, Long>>
+ static class SessionsToStringsDoFn extends OldDoFn<KV<String, Long>, KV<String, Long>>
implements RequiresWindowAccess {
@Override
@@ -142,7 +142,7 @@ public class TopWikipediaSessions {
}
}
- static class FormatOutputDoFn extends DoFn<List<KV<String, Long>>, String>
+ static class FormatOutputDoFn extends OldDoFn<List<KV<String, Long>>, String>
implements RequiresWindowAccess {
@Override
public void processElement(ProcessContext c) {
@@ -168,7 +168,7 @@ public class TopWikipediaSessions {
.apply(ParDo.of(new ExtractUserAndTimestamp()))
.apply("SampleUsers", ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
if (Math.abs(c.element().hashCode()) <= Integer.MAX_VALUE * samplingThreshold) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
index 7b1496f..9122015 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -145,12 +145,12 @@ public class TrafficMaxLaneFlow {
/**
* Extract the timestamp field from the input string, and use it as the element timestamp.
*/
- static class ExtractTimestamps extends DoFn<String, String> {
+ static class ExtractTimestamps extends OldDoFn<String, String> {
private static final DateTimeFormatter dateTimeFormat =
DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
@Override
- public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
String[] items = c.element().split(",");
if (items.length > 0) {
try {
@@ -170,7 +170,7 @@ public class TrafficMaxLaneFlow {
* information. The number of lanes for which data is present depends upon which freeway the data
* point comes from.
*/
- static class ExtractFlowInfoFn extends DoFn<String, KV<String, LaneInfo>> {
+ static class ExtractFlowInfoFn extends OldDoFn<String, KV<String, LaneInfo>> {
@Override
public void processElement(ProcessContext c) {
@@ -226,7 +226,7 @@ public class TrafficMaxLaneFlow {
* Format the results of the Max Lane flow calculation to a TableRow, to save to BigQuery.
* Add the timestamp from the window context.
*/
- static class FormatMaxesFn extends DoFn<KV<String, LaneInfo>, TableRow> {
+ static class FormatMaxesFn extends OldDoFn<KV<String, LaneInfo>, TableRow> {
@Override
public void processElement(ProcessContext c) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
index ebf7b9a..30091b6 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java
@@ -29,8 +29,8 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
@@ -149,12 +149,12 @@ public class TrafficRoutes {
/**
* Extract the timestamp field from the input string, and use it as the element timestamp.
*/
- static class ExtractTimestamps extends DoFn<String, String> {
+ static class ExtractTimestamps extends OldDoFn<String, String> {
private static final DateTimeFormatter dateTimeFormat =
DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
@Override
- public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {
String[] items = c.element().split(",");
String timestamp = tryParseTimestamp(items);
if (timestamp != null) {
@@ -171,7 +171,7 @@ public class TrafficRoutes {
* Filter out readings for the stations along predefined 'routes', and output
* (station, speed info) keyed on route.
*/
- static class ExtractStationSpeedFn extends DoFn<String, KV<String, StationSpeed>> {
+ static class ExtractStationSpeedFn extends OldDoFn<String, KV<String, StationSpeed>> {
@Override
public void processElement(ProcessContext c) {
@@ -200,7 +200,7 @@ public class TrafficRoutes {
* Note: these calculations are for example purposes only, and are unrealistic and oversimplified.
*/
static class GatherStats
- extends DoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
+ extends OldDoFn<KV<String, Iterable<StationSpeed>>, KV<String, RouteInfo>> {
@Override
public void processElement(ProcessContext c) throws IOException {
String route = c.element().getKey();
@@ -243,7 +243,7 @@ public class TrafficRoutes {
/**
* Format the results of the slowdown calculations to a TableRow, to save to BigQuery.
*/
- static class FormatStatsFn extends DoFn<KV<String, RouteInfo>, TableRow> {
+ static class FormatStatsFn extends OldDoFn<KV<String, RouteInfo>, TableRow> {
@Override
public void processElement(ProcessContext c) {
RouteInfo routeInfo = c.element().getValue();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
index 665be01..6002b11 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -81,7 +81,7 @@ public class BigQueryTornadoes {
* Examines each row in the input table. If a tornado was recorded
* in that sample, the month in which it occurred is output.
*/
- static class ExtractTornadoesFn extends DoFn<TableRow, Integer> {
+ static class ExtractTornadoesFn extends OldDoFn<TableRow, Integer> {
@Override
public void processElement(ProcessContext c){
TableRow row = c.element();
@@ -95,7 +95,7 @@ public class BigQueryTornadoes {
* Prepares the data for writing to BigQuery by building a TableRow object containing an
* integer representation of month and the number of tornadoes that occurred in each month.
*/
- static class FormatCountsFn extends DoFn<KV<Integer, Long>, TableRow> {
+ static class FormatCountsFn extends OldDoFn<KV<Integer, Long>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
index 252f3cc..d0bce5d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -90,7 +90,7 @@ public class CombinePerKeyExamples {
* Examines each row in the input table. If the word is greater than or equal to MIN_WORD_LENGTH,
* outputs word, play_name.
*/
- static class ExtractLargeWordsFn extends DoFn<TableRow, KV<String, String>> {
+ static class ExtractLargeWordsFn extends OldDoFn<TableRow, KV<String, String>> {
private final Aggregator<Long, Long> smallerWords =
createAggregator("smallerWords", new Sum.SumLongFn());
@@ -114,7 +114,7 @@ public class CombinePerKeyExamples {
* Prepares the data for writing to BigQuery by building a TableRow object
* containing a word with a string listing the plays in which it appeared.
*/
- static class FormatShakespeareOutputFn extends DoFn<KV<String, String>, TableRow> {
+ static class FormatShakespeareOutputFn extends OldDoFn<KV<String, String>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
index 847523b..1850e89 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
@@ -32,8 +32,8 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import com.google.datastore.v1beta3.Entity;
@@ -44,7 +44,6 @@ import com.google.datastore.v1beta3.Value;
import java.util.Map;
import java.util.UUID;
-
import javax.annotation.Nullable;
/**
@@ -80,10 +79,10 @@ import javax.annotation.Nullable;
public class DatastoreWordCount {
/**
- * A DoFn that gets the content of an entity (one line in a
+ * An OldDoFn that gets the content of an entity (one line in a
* Shakespeare play) and converts it to a string.
*/
- static class GetContentFn extends DoFn<Entity, String> {
+ static class GetContentFn extends OldDoFn<Entity, String> {
@Override
public void processElement(ProcessContext c) {
Map<String, Value> props = c.element().getProperties();
@@ -109,9 +108,9 @@ public class DatastoreWordCount {
}
/**
- * A DoFn that creates entity for every line in Shakespeare.
+ * An OldDoFn that creates an entity for every line in Shakespeare.
*/
- static class CreateEntityFn extends DoFn<String, Entity> {
+ static class CreateEntityFn extends OldDoFn<String, Entity> {
private final String namespace;
private final String kind;
private final Key ancestorKey;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
index ea1dcf6..06fba77 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Mean;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
@@ -98,7 +98,7 @@ public class FilterExamples {
* Examines each row in the input table. Outputs only the subset of the cells this example
* is interested in-- the mean_temp and year, month, and day-- as a bigquery table row.
*/
- static class ProjectionFn extends DoFn<TableRow, TableRow> {
+ static class ProjectionFn extends OldDoFn<TableRow, TableRow> {
@Override
public void processElement(ProcessContext c){
TableRow row = c.element();
@@ -119,9 +119,9 @@ public class FilterExamples {
* Implements 'filter' functionality.
*
* <p>Examines each row in the input table. Outputs only rows from the month
- * monthFilter, which is passed in as a parameter during construction of this DoFn.
+ * monthFilter, which is passed in as a parameter during construction of this OldDoFn.
*/
- static class FilterSingleMonthDataFn extends DoFn<TableRow, TableRow> {
+ static class FilterSingleMonthDataFn extends OldDoFn<TableRow, TableRow> {
Integer monthFilter;
public FilterSingleMonthDataFn(Integer monthFilter) {
@@ -143,7 +143,7 @@ public class FilterExamples {
* Examines each row (weather reading) in the input table. Output the temperature
* reading for that row ('mean_temp').
*/
- static class ExtractTempFn extends DoFn<TableRow, Double> {
+ static class ExtractTempFn extends OldDoFn<TableRow, Double> {
@Override
public void processElement(ProcessContext c){
TableRow row = c.element();
@@ -191,7 +191,7 @@ public class FilterExamples {
PCollection<TableRow> filteredRows = monthFilteredRows
.apply("ParseAndFilter", ParDo
.withSideInputs(globalMeanTemp)
- .of(new DoFn<TableRow, TableRow>() {
+ .of(new OldDoFn<TableRow, TableRow>() {
@Override
public void processElement(ProcessContext c) {
Double meanTemp = Double.parseDouble(c.element().get("mean_temp").toString());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
index 1b43cc2..5260c0d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.join.CoGbkResult;
import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -99,7 +99,7 @@ public class JoinExamples {
// country code 'key' -> string of <event info>, <country name>
PCollection<KV<String, String>> finalResultCollection =
kvpCollection.apply("Process", ParDo.of(
- new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
+ new OldDoFn<KV<String, CoGbkResult>, KV<String, String>>() {
@Override
public void processElement(ProcessContext c) {
KV<String, CoGbkResult> e = c.element();
@@ -116,7 +116,7 @@ public class JoinExamples {
// write to GCS
PCollection<String> formattedResults = finalResultCollection
- .apply("Format", ParDo.of(new DoFn<KV<String, String>, String>() {
+ .apply("Format", ParDo.of(new OldDoFn<KV<String, String>, String>() {
@Override
public void processElement(ProcessContext c) {
String outputstring = "Country code: " + c.element().getKey()
@@ -131,7 +131,7 @@ public class JoinExamples {
* Examines each row (event) in the input table. Output a KV with the key the country
* code of the event, and the value a string encoding event information.
*/
- static class ExtractEventDataFn extends DoFn<TableRow, KV<String, String>> {
+ static class ExtractEventDataFn extends OldDoFn<TableRow, KV<String, String>> {
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
@@ -149,7 +149,7 @@ public class JoinExamples {
* Examines each row (country info) in the input table. Output a KV with the key the country
* code, and the value the country name.
*/
- static class ExtractCountryInfoFn extends DoFn<TableRow, KV<String, String>> {
+ static class ExtractCountryInfoFn extends OldDoFn<TableRow, KV<String, String>> {
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
index a37690b..1bcb491 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -82,7 +82,7 @@ public class MaxPerKeyExamples {
* Examines each row (weather reading) in the input table. Output the month of the reading,
* and the mean_temp.
*/
- static class ExtractTempFn extends DoFn<TableRow, KV<Integer, Double>> {
+ static class ExtractTempFn extends OldDoFn<TableRow, KV<Integer, Double>> {
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
@@ -96,7 +96,7 @@ public class MaxPerKeyExamples {
* Format the results to a TableRow, to save to BigQuery.
*
*/
- static class FormatMaxesFn extends DoFn<KV<Integer, Double>, TableRow> {
+ static class FormatMaxesFn extends OldDoFn<KV<Integer, Double>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
index a0c5181..0be9921 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java
@@ -28,9 +28,9 @@ import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.StreamingOptions;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.AfterEach;
@@ -342,7 +342,7 @@ public class TriggerExample {
.apply(GroupByKey.<String, Integer>create());
PCollection<KV<String, String>> results = flowPerFreeway.apply(ParDo.of(
- new DoFn <KV<String, Iterable<Integer>>, KV<String, String>>() {
+ new OldDoFn<KV<String, Iterable<Integer>>, KV<String, String>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
@@ -365,7 +365,7 @@ public class TriggerExample {
* Format the results of the Total flow calculation to a TableRow, to save to BigQuery.
* Adds the triggerType, pane information, processing time and the window timestamp.
* */
- static class FormatTotalFlow extends DoFn<KV<String, String>, TableRow>
+ static class FormatTotalFlow extends OldDoFn<KV<String, String>, TableRow>
implements RequiresWindowAccess {
private String triggerType;
@@ -394,7 +394,7 @@ public class TriggerExample {
* Extract the freeway and total flow in a reading.
* Freeway is used as key since we are calculating the total flow for each freeway.
*/
- static class ExtractFlowInfo extends DoFn<String, KV<String, Integer>> {
+ static class ExtractFlowInfo extends OldDoFn<String, KV<String, Integer>> {
@Override
public void processElement(ProcessContext c) throws Exception {
String[] laneInfo = c.element().split(",");
@@ -471,7 +471,7 @@ public class TriggerExample {
* Add current time to each record.
* Also insert a delay at random to demo the triggers.
*/
- public static class InsertDelays extends DoFn<String, String> {
+ public static class InsertDelays extends OldDoFn<String, String> {
private static final double THRESHOLD = 0.001;
// MIN_DELAY and MAX_DELAY in minutes.
private static final int MIN_DELAY = 1;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
index ff117dc..26bf8fb 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java
@@ -46,7 +46,7 @@ import java.util.List;
@RunWith(JUnit4.class)
public class WordCountTest {
- /** Example test that tests a specific DoFn. */
+ /** Example test that tests a specific OldDoFn. */
@Test
public void testExtractWordsFn() throws Exception {
DoFnTester<String, String> extractWordsFn =
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
index b2ed9a2..6f68ce8 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java
@@ -23,8 +23,8 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -171,7 +171,7 @@ public class AutoCompleteTest implements Serializable {
extends PTransform<PCollection<TimestampedValue<T>>, PCollection<T>> {
@Override
public PCollection<T> apply(PCollection<TimestampedValue<T>> input) {
- return input.apply(ParDo.of(new DoFn<TimestampedValue<T>, T>() {
+ return input.apply(ParDo.of(new OldDoFn<TimestampedValue<T>, T>() {
@Override
public void processElement(ProcessContext c) {
c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
----------------------------------------------------------------------
diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
index 6f58389..e72a9e8 100644
--- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
+++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.DoFnTester;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -141,7 +141,7 @@ public class TriggerExampleTest {
return Joiner.on(",").join(entries);
}
- static class FormatResults extends DoFn<TableRow, String> {
+ static class FormatResults extends OldDoFn<TableRow, String> {
@Override
public void processElement(ProcessContext c) throws Exception {
TableRow element = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
index 33b8727..b1407f6 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java
@@ -27,10 +27,10 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.Mean;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -126,7 +126,7 @@ public class GameStats extends LeaderBoard {
.apply("ProcessAndFilter", ParDo
// use the derived mean total score as a side input
.withSideInputs(globalMeanScore)
- .of(new DoFn<KV<String, Integer>, KV<String, Integer>>() {
+ .of(new OldDoFn<KV<String, Integer>, KV<String, Integer>>() {
private final Aggregator<Long, Long> numSpammerUsers =
createAggregator("SpammerUsers", new Sum.SumLongFn());
@Override
@@ -149,7 +149,7 @@ public class GameStats extends LeaderBoard {
/**
* Calculate and output an element's session duration.
*/
- private static class UserSessionInfoFn extends DoFn<KV<String, Integer>, Integer>
+ private static class UserSessionInfoFn extends OldDoFn<KV<String, Integer>, Integer>
implements RequiresWindowAccess {
@Override
@@ -281,7 +281,7 @@ public class GameStats extends LeaderBoard {
// Filter out the detected spammer users, using the side input derived above.
.apply("FilterOutSpammers", ParDo
.withSideInputs(spammersView)
- .of(new DoFn<GameActionInfo, GameActionInfo>() {
+ .of(new OldDoFn<GameActionInfo, GameActionInfo>() {
@Override
public void processElement(ProcessContext c) {
// If the user is not in the spammers Map, output the data element.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
index 28614cb..00dc8a4 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java
@@ -28,8 +28,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -123,7 +123,7 @@ public class UserScore {
* user2_AsparagusPig,AsparagusPig,10,1445230923951,2015-11-02 09:09:28.224
* The human-readable time string is not used here.
*/
- static class ParseEventFn extends DoFn<String, GameActionInfo> {
+ static class ParseEventFn extends OldDoFn<String, GameActionInfo> {
// Log and count parse errors.
private static final Logger LOG = LoggerFactory.getLogger(ParseEventFn.class);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
index 36ed195..6af6e15 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -66,10 +66,10 @@ public class WriteToBigQuery<T>
// The BigQuery 'type' of the field
private String fieldType;
// A lambda function to generate the field value
- private SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fieldFn;
+ private SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fieldFn;
public FieldInfo(String fieldType,
- SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fieldFn) {
+ SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fieldFn) {
this.fieldType = fieldType;
this.fieldFn = fieldFn;
}
@@ -78,12 +78,12 @@ public class WriteToBigQuery<T>
return this.fieldType;
}
- SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> getFieldFn() {
+ SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> getFieldFn() {
return this.fieldFn;
}
}
/** Convert each key/score pair into a BigQuery TableRow as specified by fieldFn. */
- protected class BuildRowFn extends DoFn<T, TableRow> {
+ protected class BuildRowFn extends OldDoFn<T, TableRow> {
@Override
public void processElement(ProcessContext c) {
@@ -92,7 +92,7 @@ public class WriteToBigQuery<T>
for (Map.Entry<String, FieldInfo<T>> entry : fieldInfo.entrySet()) {
String key = entry.getKey();
FieldInfo<T> fcnInfo = entry.getValue();
- SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fcn =
+ SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fcn =
fcnInfo.getFieldFn();
row.set(key, fcn.apply(c));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
index b4c9b4a..c59fd61 100644
--- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
+++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java
@@ -20,8 +20,8 @@ package org.apache.beam.examples.complete.game.utils;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.values.PCollection;
@@ -45,7 +45,7 @@ public class WriteWindowedToBigQuery<T>
}
/** Convert each key/score pair into a BigQuery TableRow. */
- protected class BuildRowFn extends DoFn<T, TableRow>
+ protected class BuildRowFn extends OldDoFn<T, TableRow>
implements RequiresWindowAccess {
@Override
@@ -55,7 +55,7 @@ public class WriteWindowedToBigQuery<T>
for (Map.Entry<String, FieldInfo<T>> entry : fieldInfo.entrySet()) {
String key = entry.getKey();
FieldInfo<T> fcnInfo = entry.getValue();
- SerializableFunction<DoFn<T, TableRow>.ProcessContext, Object> fcn =
+ SerializableFunction<OldDoFn<T, TableRow>.ProcessContext, Object> fcn =
fcnInfo.getFieldFn();
row.set(key, fcn.apply(c));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
----------------------------------------------------------------------
diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
index cc3e7fa..01efad8 100644
--- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
+++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java
@@ -83,7 +83,7 @@ public class UserScoreTest implements Serializable {
KV.of("AndroidGreenKookaburra", 23),
KV.of("BisqueBilby", 14));
- /** Test the ParseEventFn DoFn. */
+ /** Test the ParseEventFn OldDoFn. */
@Test
public void testParseEventFn() throws Exception {
DoFnTester<String, GameActionInfo> parseEventFn =
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
index 0d320bc..7cdab00 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
@@ -18,7 +18,7 @@
package org.apache.beam.runners.core;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.DoFnRunner.ReduceFnExecutor;
@@ -41,10 +41,10 @@ import org.apache.beam.sdk.values.KV;
@SystemDoFnInternal
public class GroupAlsoByWindowViaWindowSetDoFn<
K, InputT, OutputT, W extends BoundedWindow, RinT extends KeyedWorkItem<K, InputT>>
- extends DoFn<RinT, KV<K, OutputT>> implements ReduceFnExecutor<K, InputT, OutputT, W> {
+ extends OldDoFn<RinT, KV<K, OutputT>> implements ReduceFnExecutor<K, InputT, OutputT, W> {
public static <K, InputT, OutputT, W extends BoundedWindow>
- DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> create(
+ OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> create(
WindowingStrategy<?, W> strategy,
StateInternalsFactory<K> stateInternalsFactory,
SystemReduceFn<K, InputT, ?, OutputT, W> reduceFn) {
@@ -99,11 +99,11 @@ public class GroupAlsoByWindowViaWindowSetDoFn<
}
@Override
- public DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn() {
+ public OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn() {
// Safe contravariant cast
@SuppressWarnings("unchecked")
- DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asFn =
- (DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>>) this;
+ OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asFn =
+ (OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>>) this;
return asFn;
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
index 5821e73..3ce0c06 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java
@@ -18,6 +18,7 @@
package org.apache.beam.runners.core;
import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName;
+
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
@@ -47,7 +48,6 @@ import com.google.common.collect.Lists;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
index d40b007..739db45 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java
@@ -19,8 +19,8 @@ package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkNotNull;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
@@ -32,14 +32,14 @@ import org.joda.time.Instant;
import java.util.Collection;
/**
- * {@link DoFn} that tags elements of a {@link PCollection} with windows, according to the provided
- * {@link WindowFn}.
+ * {@link OldDoFn} that tags elements of a {@link PCollection} with windows, according to the
+ * provided {@link WindowFn}.
*
* @param <T> Type of elements being windowed
* @param <W> Window type
*/
@SystemDoFnInternal
-public class AssignWindowsDoFn<T, W extends BoundedWindow> extends DoFn<T, T>
+public class AssignWindowsDoFn<T, W extends BoundedWindow> extends OldDoFn<T, T>
implements RequiresWindowAccess {
private WindowFn<? super T, W> fn;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
index 4ec8920..49206d1 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunner.java
@@ -18,41 +18,42 @@
package org.apache.beam.sdk.util;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.ProcessContext;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.ProcessContext;
import org.apache.beam.sdk.values.KV;
/**
- * An wrapper interface that represents the execution of a {@link DoFn}.
+ * An wrapper interface that represents the execution of a {@link OldDoFn}.
*/
public interface DoFnRunner<InputT, OutputT> {
/**
- * Prepares and calls {@link DoFn#startBundle}.
+ * Prepares and calls {@link OldDoFn#startBundle}.
*/
public void startBundle();
/**
- * Calls {@link DoFn#processElement} with a {@link ProcessContext} containing the current element.
+ * Calls {@link OldDoFn#processElement} with a {@link ProcessContext} containing the current
+ * element.
*/
public void processElement(WindowedValue<InputT> elem);
/**
- * Calls {@link DoFn#finishBundle} and performs additional tasks, such as
+ * Calls {@link OldDoFn#finishBundle} and performs additional tasks, such as
* flushing in-memory states.
*/
public void finishBundle();
/**
- * An internal interface for signaling that a {@link DoFn} requires late data dropping.
+ * An internal interface for signaling that a {@link OldDoFn} requires late data dropping.
*/
public interface ReduceFnExecutor<K, InputT, OutputT, W> {
/**
- * Gets this object as a {@link DoFn}.
+ * Gets this object as a {@link OldDoFn}.
*
- * Most implementors of this interface are expected to be {@link DoFn} instances, and will
+ * Most implementors of this interface are expected to be {@link OldDoFn} instances, and will
* return themselves.
*/
- DoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn();
+ OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn();
/**
* Returns an aggregator that tracks elements that are dropped due to being late.
[16/19] incubator-beam git commit: Port AutoComplete example from
OldDoFn to DoFn
Posted by dh...@apache.org.
Port AutoComplete example from OldDoFn to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3236eec2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3236eec2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3236eec2
Branch: refs/heads/master
Commit: 3236eec22a8902393e6becefb771b9a4768ccc50
Parents: 49d2f17
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:37 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../beam/examples/complete/AutoComplete.java | 30 ++++++++++----------
1 file changed, 15 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3236eec2/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
index 7b44af8..1ab39c9 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
@@ -36,9 +36,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Partition;
@@ -130,8 +130,8 @@ public class AutoComplete {
// Map the KV outputs of Count into our own CompletionCandiate class.
.apply("CreateCompletionCandidates", ParDo.of(
- new OldDoFn<KV<String, Long>, CompletionCandidate>() {
- @Override
+ new DoFn<KV<String, Long>, CompletionCandidate>() {
+ @ProcessElement
public void processElement(ProcessContext c) {
c.output(new CompletionCandidate(c.element().getKey(), c.element().getValue()));
}
@@ -209,8 +209,8 @@ public class AutoComplete {
}
private static class FlattenTops
- extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
- @Override
+ extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+ @ProcessElement
public void processElement(ProcessContext c) {
for (CompletionCandidate cc : c.element().getValue()) {
c.output(cc);
@@ -260,10 +260,10 @@ public class AutoComplete {
}
/**
- * A OldDoFn that keys each candidate by all its prefixes.
+ * A DoFn that keys each candidate by all its prefixes.
*/
private static class AllPrefixes
- extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+ extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
private final int minPrefix;
private final int maxPrefix;
public AllPrefixes(int minPrefix) {
@@ -273,8 +273,8 @@ public class AutoComplete {
this.minPrefix = minPrefix;
this.maxPrefix = maxPrefix;
}
- @Override
- public void processElement(ProcessContext c) {
+ @ProcessElement
+ public void processElement(ProcessContext c) {
String word = c.element().value;
for (int i = minPrefix; i <= Math.min(word.length(), maxPrefix); i++) {
c.output(KV.of(word.substring(0, i), c.element()));
@@ -341,8 +341,8 @@ public class AutoComplete {
/**
* Takes as input a set of strings, and emits each #hashtag found therein.
*/
- static class ExtractHashtags extends OldDoFn<String, String> {
- @Override
+ static class ExtractHashtags extends DoFn<String, String> {
+ @ProcessElement
public void processElement(ProcessContext c) {
Matcher m = Pattern.compile("#\\S+").matcher(c.element());
while (m.find()) {
@@ -351,8 +351,8 @@ public class AutoComplete {
}
}
- static class FormatForBigquery extends OldDoFn<KV<String, List<CompletionCandidate>>, TableRow> {
- @Override
+ static class FormatForBigquery extends DoFn<KV<String, List<CompletionCandidate>>, TableRow> {
+ @ProcessElement
public void processElement(ProcessContext c) {
List<TableRow> completions = new ArrayList<>();
for (CompletionCandidate cc : c.element().getValue()) {
@@ -385,14 +385,14 @@ public class AutoComplete {
* Takes as input a the top candidates per prefix, and emits an entity
* suitable for writing to Datastore.
*/
- static class FormatForDatastore extends OldDoFn<KV<String, List<CompletionCandidate>>, Entity> {
+ static class FormatForDatastore extends DoFn<KV<String, List<CompletionCandidate>>, Entity> {
private String kind;
public FormatForDatastore(String kind) {
this.kind = kind;
}
- @Override
+ @ProcessElement
public void processElement(ProcessContext c) {
Entity.Builder entityBuilder = Entity.newBuilder();
Key key = DatastoreHelper.makeKey(kind, c.element().getKey()).build();
[09/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
index 5b9eeff..5e96c46 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SerializationTest.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -145,9 +145,9 @@ public class SerializationTest {
}
/**
- * A DoFn that tokenizes lines of text into individual words.
+ * A OldDoFn that tokenizes lines of text into individual words.
*/
- static class ExtractWordsFn extends DoFn<StringHolder, StringHolder> {
+ static class ExtractWordsFn extends OldDoFn<StringHolder, StringHolder> {
private static final Pattern WORD_BOUNDARY = Pattern.compile("[^a-zA-Z']+");
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
@@ -173,9 +173,9 @@ public class SerializationTest {
}
/**
- * A DoFn that converts a Word and Count into a printable string.
+ * A OldDoFn that converts a Word and Count into a printable string.
*/
- private static class FormatCountsFn extends DoFn<KV<StringHolder, Long>, StringHolder> {
+ private static class FormatCountsFn extends OldDoFn<KV<StringHolder, Long>, StringHolder> {
@Override
public void processElement(ProcessContext c) {
c.output(new StringHolder(c.element().getKey() + ": " + c.element().getValue()));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
index 60b7f71..5775565 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/SideEffectsTest.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.StringDelegateCoder;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.junit.After;
@@ -54,7 +54,7 @@ public class SideEffectsTest implements Serializable {
pipeline.getCoderRegistry().registerCoder(URI.class, StringDelegateCoder.of(URI.class));
- pipeline.apply(Create.of("a")).apply(ParDo.of(new DoFn<String, String>() {
+ pipeline.apply(Create.of("a")).apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
throw new UserException();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
index 904b448..c005f14 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/KafkaStreamingTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -122,7 +122,7 @@ public class KafkaStreamingTest {
EMBEDDED_ZOOKEEPER.shutdown();
}
- private static class FormatKVFn extends DoFn<KV<String, String>, String> {
+ private static class FormatKVFn extends OldDoFn<KV<String, String>, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey() + "," + c.element().getValue());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
index 873a591..da4db93 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.values.TypeDescriptor;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
index 9db6650..c34ce66 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java
@@ -20,7 +20,6 @@ package org.apache.beam.sdk.coders;
import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Duration;
import org.joda.time.ReadableDuration;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
index 693791c..d41bd1f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java
@@ -22,7 +22,6 @@ import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
import com.google.common.base.Converter;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Instant;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
index ecb1f0a..182fa1f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.options.PubsubOptions;
import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -709,7 +709,7 @@ public class PubsubIO {
*
* <p>Public so can be suppressed by runners.
*/
- public class PubsubBoundedReader extends DoFn<Void, T> {
+ public class PubsubBoundedReader extends OldDoFn<Void, T> {
private static final int DEFAULT_PULL_SIZE = 100;
private static final int ACK_TIMEOUT_SEC = 60;
@@ -998,7 +998,7 @@ public class PubsubIO {
*
* <p>Public so can be suppressed by runners.
*/
- public class PubsubBoundedWriter extends DoFn<T, Void> {
+ public class PubsubBoundedWriter extends OldDoFn<T, Void> {
private static final int MAX_PUBLISH_BATCH_SIZE = 100;
private transient List<OutgoingMessage> output;
private transient PubsubClient pubsubClient;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
index 6f2b3ac..9e9536d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java
@@ -31,8 +31,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.options.PubsubOptions;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -78,7 +78,7 @@ import javax.annotation.Nullable;
* <li>We try to send messages in batches while also limiting send latency.
* <li>No stats are logged. Rather some counters are used to keep track of elements and batches.
* <li>Though some background threads are used by the underlying netty system all actual Pubsub
- * calls are blocking. We rely on the underlying runner to allow multiple {@link DoFn} instances
+ * calls are blocking. We rely on the underlying runner to allow multiple {@link OldDoFn} instances
* to execute concurrently and hide latency.
* <li>A failed bundle will cause messages to be resent. Thus we rely on the Pubsub consumer
* to dedup messages.
@@ -155,7 +155,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
/**
* Convert elements to messages and shard them.
*/
- private static class ShardFn<T> extends DoFn<T, KV<Integer, OutgoingMessage>> {
+ private static class ShardFn<T> extends OldDoFn<T, KV<Integer, OutgoingMessage>> {
private final Aggregator<Long, Long> elementCounter =
createAggregator("elements", new Sum.SumLongFn());
private final Coder<T> elementCoder;
@@ -207,7 +207,7 @@ public class PubsubUnboundedSink<T> extends PTransform<PCollection<T>, PDone> {
* Publish messages to Pubsub in batches.
*/
private static class WriterFn
- extends DoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
+ extends OldDoFn<KV<Integer, Iterable<OutgoingMessage>>, Void> {
private final PubsubClientFactory pubsubFactory;
private final TopicPath topic;
private final String timestampLabel;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
index 07d355e..d98bd6a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PubsubOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -1107,7 +1107,7 @@ public class PubsubUnboundedSource<T> extends PTransform<PBegin, PCollection<T>>
// StatsFn
// ================================================================================
- private static class StatsFn<T> extends DoFn<T, T> {
+ private static class StatsFn<T> extends OldDoFn<T, T> {
private final Aggregator<Long, Long> elementCounter =
createAggregator("elements", new Sum.SumLongFn());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
index b8902f9..de00035 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java
@@ -19,9 +19,9 @@ package org.apache.beam.sdk.io;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
-
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
+
import org.joda.time.Instant;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
index 42d3c05..3e997b0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java
@@ -27,8 +27,8 @@ import org.apache.beam.sdk.io.Sink.WriteOperation;
import org.apache.beam.sdk.io.Sink.Writer;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -156,7 +156,7 @@ public class Write {
* Writes all the elements in a bundle using a {@link Writer} produced by the
* {@link WriteOperation} associated with the {@link Sink}.
*/
- private class WriteBundles<WriteT> extends DoFn<T, WriteT> {
+ private class WriteBundles<WriteT> extends OldDoFn<T, WriteT> {
// Writer that will write the records in this bundle. Lazily
// initialized in processElement.
private Writer<T, WriteT> writer = null;
@@ -182,7 +182,7 @@ public class Write {
// Discard write result and close the write.
try {
writer.close();
- // The writer does not need to be reset, as this DoFn cannot be reused.
+ // The writer does not need to be reset, as this OldDoFn cannot be reused.
} catch (Exception closeException) {
if (closeException instanceof InterruptedException) {
// Do not silently ignore interrupted state.
@@ -217,7 +217,7 @@ public class Write {
*
* @see WriteBundles
*/
- private class WriteShardedBundles<WriteT> extends DoFn<KV<Integer, Iterable<T>>, WriteT> {
+ private class WriteShardedBundles<WriteT> extends OldDoFn<KV<Integer, Iterable<T>>, WriteT> {
private final PCollectionView<WriteOperation<T, WriteT>> writeOperationView;
WriteShardedBundles(PCollectionView<WriteOperation<T, WriteT>> writeOperationView) {
@@ -296,10 +296,11 @@ public class Write {
* <p>This singleton collection containing the WriteOperation is then used as a side input to a
* ParDo over the PCollection of elements to write. In this bundle-writing phase,
* {@link WriteOperation#createWriter} is called to obtain a {@link Writer}.
- * {@link Writer#open} and {@link Writer#close} are called in {@link DoFn#startBundle} and
- * {@link DoFn#finishBundle}, respectively, and {@link Writer#write} method is called for every
- * element in the bundle. The output of this ParDo is a PCollection of <i>writer result</i>
- * objects (see {@link Sink} for a description of writer results)-one for each bundle.
+ * {@link Writer#open} and {@link Writer#close} are called in {@link OldDoFn#startBundle} and
+ * {@link OldDoFn#finishBundle}, respectively, and {@link Writer#write} method is called for
+ * every element in the bundle. The output of this ParDo is a PCollection of
+ * <i>writer result</i> objects (see {@link Sink} for a description of writer results)-one for
+ * each bundle.
*
* <p>The final do-once ParDo uses the singleton collection of the WriteOperation as input and
* the collection of writer results as a side-input. In this ParDo,
@@ -333,7 +334,7 @@ public class Write {
// Initialize the resource in a do-once ParDo on the WriteOperation.
operationCollection = operationCollection
.apply("Initialize", ParDo.of(
- new DoFn<WriteOperation<T, WriteT>, WriteOperation<T, WriteT>>() {
+ new OldDoFn<WriteOperation<T, WriteT>, WriteOperation<T, WriteT>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
WriteOperation<T, WriteT> writeOperation = c.element();
@@ -387,7 +388,7 @@ public class Write {
// ParDo. There is a dependency between this ParDo and the parallel write (the writer results
// collection as a side input), so it will happen after the parallel write.
operationCollection
- .apply("Finalize", ParDo.of(new DoFn<WriteOperation<T, WriteT>, Integer>() {
+ .apply("Finalize", ParDo.of(new OldDoFn<WriteOperation<T, WriteT>, Integer>() {
@Override
public void processElement(ProcessContext c) throws Exception {
WriteOperation<T, WriteT> writeOperation = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
index e0a1ef3..b2df96e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java
@@ -29,7 +29,6 @@ import com.google.common.base.Strings;
import com.google.common.io.Files;
import com.fasterxml.jackson.annotation.JsonIgnore;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
index e89e5ad..aa9f13e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.options.GoogleApiDebugOptions.GoogleApiTracer;
import org.apache.beam.sdk.options.ProxyInvocationHandler.Deserializer;
import org.apache.beam.sdk.options.ProxyInvocationHandler.Serializer;
import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.Context;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.Context;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
import com.google.auto.service.AutoService;
@@ -52,7 +52,7 @@ import javax.annotation.concurrent.ThreadSafe;
* and {@link PipelineOptionsFactory#as(Class)}. They can be created
* from command-line arguments with {@link PipelineOptionsFactory#fromArgs(String[])}.
* They can be converted to another type by invoking {@link PipelineOptions#as(Class)} and
- * can be accessed from within a {@link DoFn} by invoking
+ * can be accessed from within a {@link OldDoFn} by invoking
* {@link Context#getPipelineOptions()}.
*
* <p>For example:
@@ -151,7 +151,7 @@ import javax.annotation.concurrent.ThreadSafe;
* {@link PipelineOptionsFactory#withValidation()} is invoked.
*
* <p>{@link JsonIgnore @JsonIgnore} is used to prevent a property from being serialized and
- * available during execution of {@link DoFn}. See the Serialization section below for more
+ * available during execution of {@link OldDoFn}. See the Serialization section below for more
* details.
*
* <h2>Registration Of PipelineOptions</h2>
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
index f21b9b9..67fa2af 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
@@ -53,7 +53,6 @@ import com.google.common.collect.TreeMultimap;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
index 815de82..607bdda 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.options;
import org.apache.beam.sdk.util.common.ReflectHelpers;
+
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
index a42ece2..6f6836e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/AggregatorValues.java
@@ -19,14 +19,14 @@ package org.apache.beam.sdk.runners;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import java.util.Collection;
import java.util.Map;
/**
* A collection of values associated with an {@link Aggregator}. Aggregators declared in a
- * {@link DoFn} are emitted on a per-{@code DoFn}-application basis.
+ * {@link OldDoFn} are emitted on a per-{@code OldDoFn}-application basis.
*
* @param <T> the output type of the aggregator
*/
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
index a202ed4..80340c2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java
@@ -33,11 +33,11 @@ import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -762,7 +762,7 @@ public class PAssert {
.apply("RewindowActuals", rewindowActuals.<T>windowActuals())
.apply(
ParDo.of(
- new DoFn<T, T>() {
+ new OldDoFn<T, T>() {
@Override
public void processElement(ProcessContext context) throws CoderException {
context.output(CoderUtils.clone(coder, context.element()));
@@ -884,7 +884,7 @@ public class PAssert {
}
}
- private static final class ConcatFn<T> extends DoFn<Iterable<Iterable<T>>, Iterable<T>> {
+ private static final class ConcatFn<T> extends OldDoFn<Iterable<Iterable<T>>, Iterable<T>> {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(Iterables.concat(c.element()));
@@ -995,13 +995,13 @@ public class PAssert {
}
/**
- * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of a
+ * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of a
* {@link PCollectionView}, and adjusts counters and thrown exceptions for use in testing.
*
* <p>The input is ignored, but is {@link Integer} to be usable on runners that do not support
* null values.
*/
- private static class SideInputCheckerDoFn<ActualT> extends DoFn<Integer, Void> {
+ private static class SideInputCheckerDoFn<ActualT> extends OldDoFn<Integer, Void> {
private final SerializableFunction<ActualT, Void> checkerFn;
private final Aggregator<Integer, Integer> success =
createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1030,13 +1030,13 @@ public class PAssert {
}
/**
- * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
+ * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
* the single iterable element of the input {@link PCollection} and adjusts counters and
* thrown exceptions for use in testing.
*
* <p>The singleton property is presumed, not enforced.
*/
- private static class GroupedValuesCheckerDoFn<ActualT> extends DoFn<ActualT, Void> {
+ private static class GroupedValuesCheckerDoFn<ActualT> extends OldDoFn<ActualT, Void> {
private final SerializableFunction<ActualT, Void> checkerFn;
private final Aggregator<Integer, Integer> success =
createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1061,14 +1061,14 @@ public class PAssert {
}
/**
- * A {@link DoFn} that runs a checking {@link SerializableFunction} on the contents of
+ * A {@link OldDoFn} that runs a checking {@link SerializableFunction} on the contents of
* the single item contained within the single iterable on input and
* adjusts counters and thrown exceptions for use in testing.
*
* <p>The singleton property of the input {@link PCollection} is presumed, not enforced. However,
* each input element must be a singleton iterable, or this will fail.
*/
- private static class SingletonCheckerDoFn<ActualT> extends DoFn<Iterable<ActualT>, Void> {
+ private static class SingletonCheckerDoFn<ActualT> extends OldDoFn<Iterable<ActualT>, Void> {
private final SerializableFunction<ActualT, Void> checkerFn;
private final Aggregator<Integer, Integer> success =
createAggregator(SUCCESS_COUNTER, new Sum.SumIntegerFn());
@@ -1310,7 +1310,7 @@ public class PAssert {
}
/**
- * A DoFn that filters elements based on their presence in a static collection of windows.
+ * A OldDoFn that filters elements based on their presence in a static collection of windows.
*/
private static final class FilterWindows<T> extends PTransform<PCollection<T>, PCollection<T>> {
private final StaticWindows windows;
@@ -1324,7 +1324,7 @@ public class PAssert {
return input.apply("FilterWindows", ParDo.of(new Fn()));
}
- private class Fn extends DoFn<T, T> implements RequiresWindowAccess {
+ private class Fn extends OldDoFn<T, T> implements RequiresWindowAccess {
@Override
public void processElement(ProcessContext c) throws Exception {
if (windows.getWindows().contains(c.window())) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
index 45b0592..4e0c0be 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java
@@ -35,7 +35,6 @@ import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
-
import javax.annotation.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
index 0de3024..98cdeba 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
@@ -37,7 +37,6 @@ import com.fasterxml.jackson.core.TreeNode;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
-
import org.junit.experimental.categories.Category;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
index ff553ba..c4596c1 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java
@@ -21,6 +21,7 @@ import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.DefaultValueFactory;
import org.apache.beam.sdk.options.PipelineOptions;
+
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
index c8aad78..db4ab33 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Aggregator.java
@@ -24,8 +24,9 @@ import org.apache.beam.sdk.util.ExecutionContext;
* An {@code Aggregator<InputT>} enables monitoring of values of type {@code InputT},
* to be combined across all bundles.
*
- * <p>Aggregators are created by calling {@link DoFn#createAggregator DoFn.createAggregatorForDoFn},
- * typically from the {@link DoFn} constructor. Elements can be added to the
+ * <p>Aggregators are created by calling
+ * {@link OldDoFn#createAggregator OldDoFn.createAggregatorForDoFn},
+ * typically from the {@link OldDoFn} constructor. Elements can be added to the
* {@code Aggregator} by calling {@link Aggregator#addValue}.
*
* <p>Aggregators are visible in the monitoring UI, when the pipeline is run
@@ -36,7 +37,7 @@ import org.apache.beam.sdk.util.ExecutionContext;
*
* <p>Example:
* <pre> {@code
- * class MyDoFn extends DoFn<String, String> {
+ * class MyDoFn extends OldDoFn<String, String> {
* private Aggregator<Integer, Integer> myAggregator;
*
* public MyDoFn() {
@@ -78,8 +79,9 @@ public interface Aggregator<InputT, OutputT> {
/**
* Create an aggregator with the given {@code name} and {@link CombineFn}.
*
- * <p>This method is called to create an aggregator for a {@link DoFn}. It receives the class
- * of the {@link DoFn} being executed and the context of the step it is being executed in.
+ * <p>This method is called to create an aggregator for a {@link OldDoFn}. It receives the
+ * class of the {@link OldDoFn} being executed and the context of the step it is being
+ * executed in.
*/
<InputT, AccumT, OutputT> Aggregator<InputT, OutputT> createAggregatorForDoFn(
Class<?> fnClass, ExecutionContext.StepContext stepContext,
@@ -88,7 +90,7 @@ public interface Aggregator<InputT, OutputT> {
// TODO: Consider the following additional API conveniences:
// - In addition to createAggregatorForDoFn(), consider adding getAggregator() to
- // avoid the need to store the aggregator locally in a DoFn, i.e., create
+ // avoid the need to store the aggregator locally in a OldDoFn, i.e., create
// if not already present.
// - Add a shortcut for the most common aggregator:
// c.createAggregatorForDoFn("name", new Sum.SumIntegerFn()).
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
index 97961e9..abed843 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AggregatorRetriever.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.transforms;
import java.util.Collection;
/**
- * An internal class for extracting {@link Aggregator Aggregators} from {@link DoFn DoFns}.
+ * An internal class for extracting {@link Aggregator Aggregators} from {@link OldDoFn DoFns}.
*/
public final class AggregatorRetriever {
private AggregatorRetriever() {
@@ -28,9 +28,9 @@ public final class AggregatorRetriever {
}
/**
- * Returns the {@link Aggregator Aggregators} created by the provided {@link DoFn}.
+ * Returns the {@link Aggregator Aggregators} created by the provided {@link OldDoFn}.
*/
- public static Collection<Aggregator<?, ?>> getAggregators(DoFn<?, ?> fn) {
+ public static Collection<Aggregator<?, ?>> getAggregators(OldDoFn<?, ?> fn) {
return fn.getAggregators();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
index 96c03eb..6fc2324 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
@@ -1473,9 +1473,9 @@ public class Combine {
PCollection<OutputT> defaultIfEmpty = maybeEmpty.getPipeline()
.apply("CreateVoid", Create.of((Void) null).withCoder(VoidCoder.of()))
.apply("ProduceDefault", ParDo.withSideInputs(maybeEmptyView).of(
- new DoFn<Void, OutputT>() {
+ new OldDoFn<Void, OutputT>() {
@Override
- public void processElement(DoFn<Void, OutputT>.ProcessContext c) {
+ public void processElement(OldDoFn<Void, OutputT>.ProcessContext c) {
Iterator<OutputT> combined = c.sideInput(maybeEmptyView).iterator();
if (!combined.hasNext()) {
c.output(defaultValue);
@@ -2097,7 +2097,7 @@ public class Combine {
final TupleTag<KV<KV<K, Integer>, InputT>> hot = new TupleTag<>();
final TupleTag<KV<K, InputT>> cold = new TupleTag<>();
PCollectionTuple split = input.apply("AddNonce", ParDo.of(
- new DoFn<KV<K, InputT>, KV<K, InputT>>() {
+ new OldDoFn<KV<K, InputT>, KV<K, InputT>>() {
transient int counter;
@Override
public void startBundle(Context c) {
@@ -2135,8 +2135,8 @@ public class Combine {
.setWindowingStrategyInternal(preCombineStrategy)
.apply("PreCombineHot", Combine.perKey(hotPreCombine))
.apply("StripNonce", ParDo.of(
- new DoFn<KV<KV<K, Integer>, AccumT>,
- KV<K, InputOrAccum<InputT, AccumT>>>() {
+ new OldDoFn<KV<KV<K, Integer>, AccumT>,
+ KV<K, InputOrAccum<InputT, AccumT>>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(
@@ -2151,7 +2151,7 @@ public class Combine {
.get(cold)
.setCoder(inputCoder)
.apply("PrepareCold", ParDo.of(
- new DoFn<KV<K, InputT>, KV<K, InputOrAccum<InputT, AccumT>>>() {
+ new OldDoFn<KV<K, InputT>, KV<K, InputOrAccum<InputT, AccumT>>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(c.element().getKey(),
@@ -2359,7 +2359,7 @@ public class Combine {
final PerKeyCombineFnRunner<? super K, ? super InputT, ?, OutputT> combineFnRunner =
PerKeyCombineFnRunners.create(fn);
PCollection<KV<K, OutputT>> output = input.apply(ParDo.of(
- new DoFn<KV<K, ? extends Iterable<InputT>>, KV<K, OutputT>>() {
+ new OldDoFn<KV<K, ? extends Iterable<InputT>>, KV<K, OutputT>>() {
@Override
public void processElement(ProcessContext c) {
K key = c.element().getKey();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
index f2ed5e1..777deba 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java
@@ -90,7 +90,7 @@ public class CombineFns {
*
* PCollection<T> finalResultCollection = maxAndMean
* .apply(ParDo.of(
- * new DoFn<KV<K, CoCombineResult>, T>() {
+ * new OldDoFn<KV<K, CoCombineResult>, T>() {
* @Override
* public void processElement(ProcessContext c) throws Exception {
* KV<K, CoCombineResult> e = c.element();
@@ -133,7 +133,7 @@ public class CombineFns {
*
* PCollection<T> finalResultCollection = maxAndMean
* .apply(ParDo.of(
- * new DoFn<CoCombineResult, T>() {
+ * new OldDoFn<CoCombineResult, T>() {
* @Override
* public void processElement(ProcessContext c) throws Exception {
* CoCombineResult e = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
index 3a0fb5d..7601ffc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java
@@ -107,7 +107,7 @@ public class Count {
public PCollection<KV<T, Long>> apply(PCollection<T> input) {
return
input
- .apply("Init", ParDo.of(new DoFn<T, KV<T, Void>>() {
+ .apply("Init", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(c.element(), (Void) null));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
index fa645ab..fb7f784 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
@@ -486,7 +486,7 @@ public class Create<T> {
this.elementCoder = elementCoder;
}
- private static class ConvertTimestamps<T> extends DoFn<TimestampedValue<T>, T> {
+ private static class ConvertTimestamps<T> extends OldDoFn<TimestampedValue<T>, T> {
@Override
public void processElement(ProcessContext c) {
c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
deleted file mode 100644
index 6d5d1ed..0000000
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ /dev/null
@@ -1,565 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.annotations.Experimental.Kind;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-import org.apache.beam.sdk.transforms.display.HasDisplayData;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowingInternals;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.TypeDescriptor;
-
-import com.google.common.base.MoreObjects;
-
-import org.joda.time.Duration;
-import org.joda.time.Instant;
-
-import java.io.Serializable;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.UUID;
-
-/**
- * The argument to {@link ParDo} providing the code to use to process
- * elements of the input
- * {@link org.apache.beam.sdk.values.PCollection}.
- *
- * <p>See {@link ParDo} for more explanation, examples of use, and
- * discussion of constraints on {@code DoFn}s, including their
- * serializability, lack of access to global shared mutable state,
- * requirements for failure tolerance, and benefits of optimization.
- *
- * <p>{@code DoFn}s can be tested in the context of a particular
- * {@code Pipeline} by running that {@code Pipeline} on sample input
- * and then checking its output. Unit testing of a {@code DoFn},
- * separately from any {@code ParDo} transform or {@code Pipeline},
- * can be done via the {@link DoFnTester} harness.
- *
- * <p>{@link DoFnWithContext} (currently experimental) offers an alternative
- * mechanism for accessing {@link ProcessContext#window()} without the need
- * to implement {@link RequiresWindowAccess}.
- *
- * <p>See also {@link #processElement} for details on implementing the transformation
- * from {@code InputT} to {@code OutputT}.
- *
- * @param <InputT> the type of the (main) input elements
- * @param <OutputT> the type of the (main) output elements
- */
-public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
-
- /**
- * Information accessible to all methods in this {@code DoFn}.
- * Used primarily to output elements.
- */
- public abstract class Context {
-
- /**
- * Returns the {@code PipelineOptions} specified with the
- * {@link org.apache.beam.sdk.runners.PipelineRunner}
- * invoking this {@code DoFn}. The {@code PipelineOptions} will
- * be the default running via {@link DoFnTester}.
- */
- public abstract PipelineOptions getPipelineOptions();
-
- /**
- * Adds the given element to the main output {@code PCollection}.
- *
- * <p>Once passed to {@code output} the element should be considered
- * immutable and not be modified in any way. It may be cached or retained
- * by the Dataflow runtime or later steps in the pipeline, or used in
- * other unspecified ways.
- *
- * <p>If invoked from {@link DoFn#processElement processElement}, the output
- * element will have the same timestamp and be in the same windows
- * as the input element passed to {@link DoFn#processElement processElement}.
- *
- * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element. The output element
- * will have a timestamp of negative infinity.
- */
- public abstract void output(OutputT output);
-
- /**
- * Adds the given element to the main output {@code PCollection},
- * with the given timestamp.
- *
- * <p>Once passed to {@code outputWithTimestamp} the element should not be
- * modified in any way.
- *
- * <p>If invoked from {@link DoFn#processElement processElement}, the timestamp
- * must not be older than the input element's timestamp minus
- * {@link DoFn#getAllowedTimestampSkew getAllowedTimestampSkew}. The output element will
- * be in the same windows as the input element.
- *
- * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element except for the
- * timestamp.
- */
- public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
-
- /**
- * Adds the given element to the side output {@code PCollection} with the
- * given tag.
- *
- * <p>Once passed to {@code sideOutput} the element should not be modified
- * in any way.
- *
- * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags withOutputTags} to
- * specify the tags of side outputs that it consumes. Non-consumed side
- * outputs, e.g., outputs for monitoring purposes only, don't necessarily
- * need to be specified.
- *
- * <p>The output element will have the same timestamp and be in the same
- * windows as the input element passed to {@link DoFn#processElement processElement}.
- *
- * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element. The output element
- * will have a timestamp of negative infinity.
- *
- * @see ParDo#withOutputTags
- */
- public abstract <T> void sideOutput(TupleTag<T> tag, T output);
-
- /**
- * Adds the given element to the specified side output {@code PCollection},
- * with the given timestamp.
- *
- * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
- * modified in any way.
- *
- * <p>If invoked from {@link DoFn#processElement processElement}, the timestamp
- * must not be older than the input element's timestamp minus
- * {@link DoFn#getAllowedTimestampSkew getAllowedTimestampSkew}. The output element will
- * be in the same windows as the input element.
- *
- * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element except for the
- * timestamp.
- *
- * @see ParDo#withOutputTags
- */
- public abstract <T> void sideOutputWithTimestamp(
- TupleTag<T> tag, T output, Instant timestamp);
-
- /**
- * Creates an {@link Aggregator} in the {@link DoFn} context with the
- * specified name and aggregation logic specified by {@link CombineFn}.
- *
- * <p>For internal use only.
- *
- * @param name the name of the aggregator
- * @param combiner the {@link CombineFn} to use in the aggregator
- * @return an aggregator for the provided name and {@link CombineFn} in this
- * context
- */
- @Experimental(Kind.AGGREGATOR)
- protected abstract <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
- createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner);
-
- /**
- * Sets up {@link Aggregator}s created by the {@link DoFn} so they are
- * usable within this context.
- *
- * <p>This method should be called by runners before {@link DoFn#startBundle}
- * is executed.
- */
- @Experimental(Kind.AGGREGATOR)
- protected final void setupDelegateAggregators() {
- for (DelegatingAggregator<?, ?> aggregator : aggregators.values()) {
- setupDelegateAggregator(aggregator);
- }
-
- aggregatorsAreFinal = true;
- }
-
- private final <AggInputT, AggOutputT> void setupDelegateAggregator(
- DelegatingAggregator<AggInputT, AggOutputT> aggregator) {
-
- Aggregator<AggInputT, AggOutputT> delegate = createAggregatorInternal(
- aggregator.getName(), aggregator.getCombineFn());
-
- aggregator.setDelegate(delegate);
- }
- }
-
- /**
- * Information accessible when running {@link DoFn#processElement}.
- */
- public abstract class ProcessContext extends Context {
-
- /**
- * Returns the input element to be processed.
- *
- * <p>The element should be considered immutable. The Dataflow runtime will not mutate the
- * element, so it is safe to cache, etc. The element should not be mutated by any of the
- * {@link DoFn} methods, because it may be cached elsewhere, retained by the Dataflow runtime,
- * or used in other unspecified ways.
- */
- public abstract InputT element();
-
- /**
- * Returns the value of the side input for the window corresponding to the
- * window of the main input element.
- *
- * <p>See
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn#getSideInputWindow}
- * for how this corresponding window is determined.
- *
- * @throws IllegalArgumentException if this is not a side input
- * @see ParDo#withSideInputs
- */
- public abstract <T> T sideInput(PCollectionView<T> view);
-
- /**
- * Returns the timestamp of the input element.
- *
- * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
- * for more information.
- */
- public abstract Instant timestamp();
-
- /**
- * Returns the window into which the input element has been assigned.
- *
- * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
- * for more information.
- *
- * @throws UnsupportedOperationException if this {@link DoFn} does
- * not implement {@link RequiresWindowAccess}.
- */
- public abstract BoundedWindow window();
-
- /**
- * Returns information about the pane within this window into which the
- * input element has been assigned.
- *
- * <p>Generally all data is in a single, uninteresting pane unless custom
- * triggering and/or late data has been explicitly requested.
- * See {@link org.apache.beam.sdk.transforms.windowing.Window}
- * for more information.
- */
- public abstract PaneInfo pane();
-
- /**
- * Returns the process context to use for implementing windowing.
- */
- @Experimental
- public abstract WindowingInternals<InputT, OutputT> windowingInternals();
- }
-
- /**
- * Returns the allowed timestamp skew duration, which is the maximum
- * duration that timestamps can be shifted backward in
- * {@link DoFn.Context#outputWithTimestamp}.
- *
- * <p>The default value is {@code Duration.ZERO}, in which case
- * timestamps can only be shifted forward to future. For infinite
- * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
- *
- * <p> Note that producing an element whose timestamp is less than the
- * current timestamp may result in late data, i.e. returning a non-zero
- * value here does not impact watermark calculations used for firing
- * windows.
- *
- * @deprecated does not interact well with the watermark.
- */
- @Deprecated
- public Duration getAllowedTimestampSkew() {
- return Duration.ZERO;
- }
-
- /**
- * Interface for signaling that a {@link DoFn} needs to access the window the
- * element is being processed in, via {@link DoFn.ProcessContext#window}.
- */
- @Experimental
- public interface RequiresWindowAccess {}
-
- public DoFn() {
- this(new HashMap<String, DelegatingAggregator<?, ?>>());
- }
-
- DoFn(Map<String, DelegatingAggregator<?, ?>> aggregators) {
- this.aggregators = aggregators;
- }
-
- /////////////////////////////////////////////////////////////////////////////
-
- private final Map<String, DelegatingAggregator<?, ?>> aggregators;
-
- /**
- * Protects aggregators from being created after initialization.
- */
- private boolean aggregatorsAreFinal;
-
- /**
- * Prepares this {@code DoFn} instance for processing a batch of elements.
- *
- * <p>By default, does nothing.
- */
- public void startBundle(Context c) throws Exception {
- }
-
- /**
- * Processes one input element.
- *
- * <p>The current element of the input {@code PCollection} is returned by
- * {@link ProcessContext#element() c.element()}. It should be considered immutable. The Dataflow
- * runtime will not mutate the element, so it is safe to cache, etc. The element should not be
- * mutated by any of the {@link DoFn} methods, because it may be cached elsewhere, retained by the
- * Dataflow runtime, or used in other unspecified ways.
- *
- * <p>A value is added to the main output {@code PCollection} by {@link ProcessContext#output}.
- * Once passed to {@code output} the element should be considered immutable and not be modified in
- * any way. It may be cached elsewhere, retained by the Dataflow runtime, or used in other
- * unspecified ways.
- *
- * @see ProcessContext
- */
- public abstract void processElement(ProcessContext c) throws Exception;
-
- /**
- * Finishes processing this batch of elements.
- *
- * <p>By default, does nothing.
- */
- public void finishBundle(Context c) throws Exception {
- }
-
- /**
- * {@inheritDoc}
- *
- * <p>By default, does not register any display data. Implementors may override this method
- * to provide their own display data.
- */
- @Override
- public void populateDisplayData(DisplayData.Builder builder) {
- }
-
- /////////////////////////////////////////////////////////////////////////////
-
- /**
- * Returns a {@link TypeDescriptor} capturing what is known statically
- * about the input type of this {@code DoFn} instance's most-derived
- * class.
- *
- * <p>See {@link #getOutputTypeDescriptor} for more discussion.
- */
- protected TypeDescriptor<InputT> getInputTypeDescriptor() {
- return new TypeDescriptor<InputT>(getClass()) {};
- }
-
- /**
- * Returns a {@link TypeDescriptor} capturing what is known statically
- * about the output type of this {@code DoFn} instance's
- * most-derived class.
- *
- * <p>In the normal case of a concrete {@code DoFn} subclass with
- * no generic type parameters of its own (including anonymous inner
- * classes), this will be a complete non-generic type, which is good
- * for choosing a default output {@code Coder<OutputT>} for the output
- * {@code PCollection<OutputT>}.
- */
- protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
- return new TypeDescriptor<OutputT>(getClass()) {};
- }
-
- /**
- * Returns an {@link Aggregator} with aggregation logic specified by the
- * {@link CombineFn} argument. The name provided must be unique across
- * {@link Aggregator}s created within the DoFn. Aggregators can only be created
- * during pipeline construction.
- *
- * @param name the name of the aggregator
- * @param combiner the {@link CombineFn} to use in the aggregator
- * @return an aggregator for the provided name and combiner in the scope of
- * this DoFn
- * @throws NullPointerException if the name or combiner is null
- * @throws IllegalArgumentException if the given name collides with another
- * aggregator in this scope
- * @throws IllegalStateException if called during pipeline processing.
- */
- protected final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
- createAggregator(String name, CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
- checkNotNull(name, "name cannot be null");
- checkNotNull(combiner, "combiner cannot be null");
- checkArgument(!aggregators.containsKey(name),
- "Cannot create aggregator with name %s."
- + " An Aggregator with that name already exists within this scope.",
- name);
-
- checkState(!aggregatorsAreFinal, "Cannot create an aggregator during DoFn processing."
- + " Aggregators should be registered during pipeline construction.");
-
- DelegatingAggregator<AggInputT, AggOutputT> aggregator =
- new DelegatingAggregator<>(name, combiner);
- aggregators.put(name, aggregator);
- return aggregator;
- }
-
- /**
- * Returns an {@link Aggregator} with the aggregation logic specified by the
- * {@link SerializableFunction} argument. The name provided must be unique
- * across {@link Aggregator}s created within the DoFn. Aggregators can only be
- * created during pipeline construction.
- *
- * @param name the name of the aggregator
- * @param combiner the {@link SerializableFunction} to use in the aggregator
- * @return an aggregator for the provided name and combiner in the scope of
- * this DoFn
- * @throws NullPointerException if the name or combiner is null
- * @throws IllegalArgumentException if the given name collides with another
- * aggregator in this scope
- * @throws IllegalStateException if called during pipeline processing.
- */
- protected final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(String name,
- SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
- checkNotNull(combiner, "combiner cannot be null.");
- return createAggregator(name, Combine.IterableCombineFn.of(combiner));
- }
-
- /**
- * Returns the {@link Aggregator Aggregators} created by this {@code DoFn}.
- */
- Collection<Aggregator<?, ?>> getAggregators() {
- return Collections.<Aggregator<?, ?>>unmodifiableCollection(aggregators.values());
- }
-
- /**
- * An {@link Aggregator} that delegates calls to addValue to another
- * aggregator.
- *
- * @param <AggInputT> the type of input element
- * @param <AggOutputT> the type of output element
- */
- static class DelegatingAggregator<AggInputT, AggOutputT> implements
- Aggregator<AggInputT, AggOutputT>, Serializable {
- private final UUID id;
-
- private final String name;
-
- private final CombineFn<AggInputT, ?, AggOutputT> combineFn;
-
- private Aggregator<AggInputT, ?> delegate;
-
- public DelegatingAggregator(String name,
- CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
- this.id = UUID.randomUUID();
- this.name = checkNotNull(name, "name cannot be null");
- // Safe contravariant cast
- @SuppressWarnings("unchecked")
- CombineFn<AggInputT, ?, AggOutputT> specificCombiner =
- (CombineFn<AggInputT, ?, AggOutputT>) checkNotNull(combiner, "combineFn cannot be null");
- this.combineFn = specificCombiner;
- }
-
- @Override
- public void addValue(AggInputT value) {
- if (delegate == null) {
- throw new IllegalStateException(
- "addValue cannot be called on Aggregator outside of the execution of a DoFn.");
- } else {
- delegate.addValue(value);
- }
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public CombineFn<AggInputT, ?, AggOutputT> getCombineFn() {
- return combineFn;
- }
-
- /**
- * Sets the current delegate of the Aggregator.
- *
- * @param delegate the delegate to set in this aggregator
- */
- public void setDelegate(Aggregator<AggInputT, ?> delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public String toString() {
- return MoreObjects.toStringHelper(getClass())
- .add("name", name)
- .add("combineFn", combineFn)
- .toString();
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(id, name, combineFn.getClass());
- }
-
- /**
- * Indicates whether some other object is "equal to" this one.
- *
- * <p>{@code DelegatingAggregator} instances are equal if they have the same name, their
- * CombineFns are the same class, and they have identical IDs.
- */
- @Override
- public boolean equals(Object o) {
- if (o == this) {
- return true;
- }
- if (o == null) {
- return false;
- }
- if (o instanceof DelegatingAggregator) {
- DelegatingAggregator<?, ?> that = (DelegatingAggregator<?, ?>) o;
- return Objects.equals(this.id, that.id)
- && Objects.equals(this.name, that.name)
- && Objects.equals(this.combineFn.getClass(), that.combineFn.getClass());
- }
- return false;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
index 0616eff..d8d4181 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
@@ -194,7 +194,7 @@ public abstract class DoFnReflector {
*/
public abstract boolean usesSingleWindow();
- /** Create an {@link DoFnInvoker} bound to the given {@link DoFn}. */
+ /** Create a {@link DoFnInvoker} bound to the given {@link OldDoFn}. */
public abstract <InputT, OutputT> DoFnInvoker<InputT, OutputT> bindInvoker(
DoFnWithContext<InputT, OutputT> fn);
@@ -217,9 +217,9 @@ public abstract class DoFnReflector {
}
/**
- * Create a {@link DoFn} that the {@link DoFnWithContext}.
+ * Create an {@link OldDoFn} that wraps the given {@link DoFnWithContext}.
*/
- public <InputT, OutputT> DoFn<InputT, OutputT> toDoFn(DoFnWithContext<InputT, OutputT> fn) {
+ public <InputT, OutputT> OldDoFn<InputT, OutputT> toDoFn(DoFnWithContext<InputT, OutputT> fn) {
if (usesSingleWindow()) {
return new WindowDoFnAdapter<InputT, OutputT>(this, fn);
} else {
@@ -287,7 +287,7 @@ public abstract class DoFnReflector {
* <li>Any generics on the extra context arguments match what is expected. Eg.,
* {@code WindowingInternals<InputT, OutputT>} either matches the
* {@code InputT} and {@code OutputT} parameters of the
- * {@code DoFn<InputT, OutputT>.ProcessContext}, or it uses a wildcard, etc.
+ * {@code OldDoFn<InputT, OutputT>.ProcessContext}, or it uses a wildcard, etc.
* </ol>
*
* @param m the method to verify
@@ -328,7 +328,7 @@ public abstract class DoFnReflector {
AdditionalParameter[] contextInfos = new AdditionalParameter[params.length - 1];
// Fill in the generics in the allExtraContextArgs interface from the types in the
- // Context or ProcessContext DoFn.
+ // Context or ProcessContext OldDoFn.
ParameterizedType pt = (ParameterizedType) contextToken.getType();
// We actually want the owner, since ProcessContext and Context are owned by DoFnWithContext.
pt = (ParameterizedType) pt.getOwnerType();
@@ -364,18 +364,18 @@ public abstract class DoFnReflector {
return ImmutableList.copyOf(contextInfos);
}
- /** Interface for invoking the {@code DoFn} processing methods. */
+ /** Interface for invoking the {@code OldDoFn} processing methods. */
public interface DoFnInvoker<InputT, OutputT> {
- /** Invoke {@link DoFn#startBundle} on the bound {@code DoFn}. */
+ /** Invoke {@link OldDoFn#startBundle} on the bound {@code OldDoFn}. */
void invokeStartBundle(
DoFnWithContext<InputT, OutputT>.Context c,
ExtraContextFactory<InputT, OutputT> extra);
- /** Invoke {@link DoFn#finishBundle} on the bound {@code DoFn}. */
+ /** Invoke {@link OldDoFn#finishBundle} on the bound {@code OldDoFn}. */
void invokeFinishBundle(
DoFnWithContext<InputT, OutputT>.Context c,
ExtraContextFactory<InputT, OutputT> extra);
- /** Invoke {@link DoFn#processElement} on the bound {@code DoFn}. */
+ /** Invoke {@link OldDoFn#processElement} on the bound {@code OldDoFn}. */
public void invokeProcessElement(
DoFnWithContext<InputT, OutputT>.ProcessContext c,
ExtraContextFactory<InputT, OutputT> extra);
@@ -565,10 +565,10 @@ public abstract class DoFnReflector {
extends DoFnWithContext<InputT, OutputT>.Context
implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
- private DoFn<InputT, OutputT>.Context context;
+ private OldDoFn<InputT, OutputT>.Context context;
private ContextAdapter(
- DoFnWithContext<InputT, OutputT> fn, DoFn<InputT, OutputT>.Context context) {
+ DoFnWithContext<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.Context context) {
fn.super();
this.context = context;
}
@@ -618,11 +618,11 @@ public abstract class DoFnReflector {
extends DoFnWithContext<InputT, OutputT>.ProcessContext
implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
- private DoFn<InputT, OutputT>.ProcessContext context;
+ private OldDoFn<InputT, OutputT>.ProcessContext context;
private ProcessContextAdapter(
DoFnWithContext<InputT, OutputT> fn,
- DoFn<InputT, OutputT>.ProcessContext context) {
+ OldDoFn<InputT, OutputT>.ProcessContext context) {
fn.super();
this.context = context;
}
@@ -683,7 +683,7 @@ public abstract class DoFnReflector {
}
}
- public static Class<?> getDoFnClass(DoFn<?, ?> fn) {
+ public static Class<?> getDoFnClass(OldDoFn<?, ?> fn) {
if (fn instanceof SimpleDoFnAdapter) {
return ((SimpleDoFnAdapter<?, ?>) fn).fn.getClass();
} else {
@@ -691,7 +691,7 @@ public abstract class DoFnReflector {
}
}
- private static class SimpleDoFnAdapter<InputT, OutputT> extends DoFn<InputT, OutputT> {
+ private static class SimpleDoFnAdapter<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
private final DoFnWithContext<InputT, OutputT> fn;
private transient DoFnInvoker<InputT, OutputT> invoker;
@@ -703,19 +703,19 @@ public abstract class DoFnReflector {
}
@Override
- public void startBundle(DoFn<InputT, OutputT>.Context c) throws Exception {
+ public void startBundle(OldDoFn<InputT, OutputT>.Context c) throws Exception {
ContextAdapter<InputT, OutputT> adapter = new ContextAdapter<>(fn, c);
invoker.invokeStartBundle(adapter, adapter);
}
@Override
- public void finishBundle(DoFn<InputT, OutputT>.Context c) throws Exception {
+ public void finishBundle(OldDoFn<InputT, OutputT>.Context c) throws Exception {
ContextAdapter<InputT, OutputT> adapter = new ContextAdapter<>(fn, c);
invoker.invokeFinishBundle(adapter, adapter);
}
@Override
- public void processElement(DoFn<InputT, OutputT>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
ProcessContextAdapter<InputT, OutputT> adapter = new ProcessContextAdapter<>(fn, c);
invoker.invokeProcessElement(adapter, adapter);
}
@@ -743,7 +743,7 @@ public abstract class DoFnReflector {
}
private static class WindowDoFnAdapter<InputT, OutputT>
- extends SimpleDoFnAdapter<InputT, OutputT> implements DoFn.RequiresWindowAccess {
+ extends SimpleDoFnAdapter<InputT, OutputT> implements OldDoFn.RequiresWindowAccess {
private WindowDoFnAdapter(DoFnReflector reflector, DoFnWithContext<InputT, OutputT> fn) {
super(reflector, fn);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
index a136632..9336e4c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
@@ -49,12 +49,12 @@ import java.util.List;
import java.util.Map;
/**
- * A harness for unit-testing a {@link DoFn}.
+ * A harness for unit-testing a {@link OldDoFn}.
*
* <p>For example:
*
* <pre> {@code
- * DoFn<InputT, OutputT> fn = ...;
+ * OldDoFn<InputT, OutputT> fn = ...;
*
* DoFnTester<InputT, OutputT> fnTester = DoFnTester.of(fn);
*
@@ -71,22 +71,22 @@ import java.util.Map;
* Assert.assertThat(fnTester.processBundle(i1, i2, ...), Matchers.hasItems(...));
* } </pre>
*
- * @param <InputT> the type of the {@code DoFn}'s (main) input elements
- * @param <OutputT> the type of the {@code DoFn}'s (main) output elements
+ * @param <InputT> the type of the {@code OldDoFn}'s (main) input elements
+ * @param <OutputT> the type of the {@code OldDoFn}'s (main) output elements
*/
public class DoFnTester<InputT, OutputT> {
/**
* Returns a {@code DoFnTester} supporting unit-testing of the given
- * {@link DoFn}.
+ * {@link OldDoFn}.
*/
@SuppressWarnings("unchecked")
- public static <InputT, OutputT> DoFnTester<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+ public static <InputT, OutputT> DoFnTester<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
return new DoFnTester<InputT, OutputT>(fn);
}
/**
* Returns a {@code DoFnTester} supporting unit-testing of the given
- * {@link DoFn}.
+ * {@link OldDoFn}.
*/
@SuppressWarnings("unchecked")
public static <InputT, OutputT> DoFnTester<InputT, OutputT>
@@ -96,12 +96,12 @@ public class DoFnTester<InputT, OutputT> {
/**
* Registers the tuple of values of the side input {@link PCollectionView}s to
- * pass to the {@link DoFn} under test.
+ * pass to the {@link OldDoFn} under test.
*
* <p>Resets the state of this {@link DoFnTester}.
*
* <p>If this isn't called, {@code DoFnTester} assumes the
- * {@link DoFn} takes no side inputs.
+ * {@link OldDoFn} takes no side inputs.
*/
public void setSideInputs(Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs) {
this.sideInputs = sideInputs;
@@ -109,8 +109,8 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Registers the values of a side input {@link PCollectionView} to pass to the {@link DoFn} under
- * test.
+ * Registers the values of a side input {@link PCollectionView} to pass to the {@link OldDoFn}
+ * under test.
*
* <p>The provided value is the final value of the side input in the specified window, not
* the value of the input PCollection in that window.
@@ -128,7 +128,7 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Whether or not a {@link DoFnTester} should clone the {@link DoFn} under test.
+ * Whether or not a {@link DoFnTester} should clone the {@link OldDoFn} under test.
*/
public enum CloningBehavior {
CLONE,
@@ -136,14 +136,14 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Instruct this {@link DoFnTester} whether or not to clone the {@link DoFn} under test.
+ * Instruct this {@link DoFnTester} whether or not to clone the {@link OldDoFn} under test.
*/
public void setCloningBehavior(CloningBehavior newValue) {
this.cloningBehavior = newValue;
}
/**
- * Indicates whether this {@link DoFnTester} will clone the {@link DoFn} under test.
+ * Indicates whether this {@link DoFnTester} will clone the {@link OldDoFn} under test.
*/
public CloningBehavior getCloningBehavior() {
return cloningBehavior;
@@ -165,7 +165,7 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * A convenience method for testing {@link DoFn DoFns} with bundles of elements.
+ * A convenience method for testing {@link OldDoFn OldDoFns} with bundles of elements.
* Logic proceeds as follows:
*
* <ol>
@@ -181,9 +181,9 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Calls {@link DoFn#startBundle} on the {@code DoFn} under test.
+ * Calls {@link OldDoFn#startBundle} on the {@code OldDoFn} under test.
*
- * <p>If needed, first creates a fresh instance of the DoFn under test.
+ * <p>If needed, first creates a fresh instance of the OldDoFn under test.
*/
public void startBundle() throws Exception {
resetState();
@@ -195,14 +195,14 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Calls {@link DoFn#processElement} on the {@code DoFn} under test, in a
- * context where {@link DoFn.ProcessContext#element} returns the
+ * Calls {@link OldDoFn#processElement} on the {@code OldDoFn} under test, in a
+ * context where {@link OldDoFn.ProcessContext#element} returns the
* given element.
*
* <p>Will call {@link #startBundle} automatically, if it hasn't
* already been called.
*
- * @throws IllegalStateException if the {@code DoFn} under test has already
+ * @throws IllegalStateException if the {@code OldDoFn} under test has already
* been finished
*/
public void processElement(InputT element) throws Exception {
@@ -216,12 +216,12 @@ public class DoFnTester<InputT, OutputT> {
}
/**
- * Calls {@link DoFn#finishBundle} of the {@code DoFn} under test.
+ * Calls {@link OldDoFn#finishBundle} of the {@code OldDoFn} under test.
*
* <p>Will call {@link #startBundle} automatically, if it hasn't
* already been called.
*
- * @throws IllegalStateException if the {@code DoFn} under test has already
+ * @throws IllegalStateException if the {@code OldDoFn} under test has already
* been finished
*/
public void finishBundle() throws Exception {
@@ -403,18 +403,18 @@ public class DoFnTester<InputT, OutputT> {
return MoreObjects.firstNonNull(elems, Collections.<WindowedValue<T>>emptyList());
}
- private TestContext<InputT, OutputT> createContext(DoFn<InputT, OutputT> fn) {
+ private TestContext<InputT, OutputT> createContext(OldDoFn<InputT, OutputT> fn) {
return new TestContext<>(fn, options, mainOutputTag, outputs, accumulators);
}
- private static class TestContext<InT, OutT> extends DoFn<InT, OutT>.Context {
+ private static class TestContext<InT, OutT> extends OldDoFn<InT, OutT>.Context {
private final PipelineOptions opts;
private final TupleTag<OutT> mainOutputTag;
private final Map<TupleTag<?>, List<WindowedValue<?>>> outputs;
private final Map<String, Object> accumulators;
public TestContext(
- DoFn<InT, OutT> fn,
+ OldDoFn<InT, OutT> fn,
PipelineOptions opts,
TupleTag<OutT> mainOutputTag,
Map<TupleTag<?>, List<WindowedValue<?>>> outputs,
@@ -498,7 +498,7 @@ public class DoFnTester<InputT, OutputT> {
}
private TestProcessContext<InputT, OutputT> createProcessContext(
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
InputT elem) {
return new TestProcessContext<>(fn,
createContext(fn),
@@ -507,14 +507,14 @@ public class DoFnTester<InputT, OutputT> {
sideInputs);
}
- private static class TestProcessContext<InT, OutT> extends DoFn<InT, OutT>.ProcessContext {
+ private static class TestProcessContext<InT, OutT> extends OldDoFn<InT, OutT>.ProcessContext {
private final TestContext<InT, OutT> context;
private final TupleTag<OutT> mainOutputTag;
private final WindowedValue<InT> element;
private final Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs;
private TestProcessContext(
- DoFn<InT, OutT> fn,
+ OldDoFn<InT, OutT> fn,
TestContext<InT, OutT> context,
WindowedValue<InT> element,
TupleTag<OutT> mainOutputTag,
@@ -643,15 +643,15 @@ public class DoFnTester<InputT, OutputT> {
protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT> createAggregatorInternal(
String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
throw new IllegalStateException("Aggregators should not be created within ProcessContext. "
- + "Instead, create an aggregator at DoFn construction time with createAggregatorForDoFn,"
- + " and ensure they are set up by the time startBundle is called "
- + "with setupDelegateAggregators.");
+ + "Instead, create an aggregator at OldDoFn construction time with"
+ + " createAggregatorForDoFn, and ensure they are set up by the time startBundle is"
+ + " called with setupDelegateAggregators.");
}
}
/////////////////////////////////////////////////////////////////////////////
- /** The possible states of processing a DoFn. */
+ /** The possible states of processing an OldDoFn. */
enum State {
UNSTARTED,
STARTED,
@@ -660,35 +660,35 @@ public class DoFnTester<InputT, OutputT> {
private final PipelineOptions options = PipelineOptionsFactory.create();
- /** The original DoFn under test. */
- private final DoFn<InputT, OutputT> origFn;
+ /** The original OldDoFn under test. */
+ private final OldDoFn<InputT, OutputT> origFn;
/**
- * Whether to clone the original {@link DoFn} or just use it as-is.
+ * Whether to clone the original {@link OldDoFn} or just use it as-is.
*
- * <p></p>Worker-side {@link DoFn DoFns} may not be serializable, and are not required to be.
+ * <p>Worker-side {@link OldDoFn OldDoFns} may not be serializable, and are not required to be.
*/
private CloningBehavior cloningBehavior = CloningBehavior.CLONE;
- /** The side input values to provide to the DoFn under test. */
+ /** The side input values to provide to the OldDoFn under test. */
private Map<PCollectionView<?>, Map<BoundedWindow, ?>> sideInputs =
new HashMap<>();
private Map<String, Object> accumulators;
- /** The output tags used by the DoFn under test. */
+ /** The output tags used by the OldDoFn under test. */
private TupleTag<OutputT> mainOutputTag = new TupleTag<>();
- /** The original DoFn under test, if started. */
- DoFn<InputT, OutputT> fn;
+ /** The original OldDoFn under test, if started. */
+ OldDoFn<InputT, OutputT> fn;
/** The ListOutputManager to examine the outputs. */
private Map<TupleTag<?>, List<WindowedValue<?>>> outputs;
- /** The state of processing of the DoFn under test. */
+ /** The state of processing of the OldDoFn under test. */
private State state;
- private DoFnTester(DoFn<InputT, OutputT> origFn) {
+ private DoFnTester(OldDoFn<InputT, OutputT> origFn) {
this.origFn = origFn;
resetState();
}
@@ -705,7 +705,7 @@ public class DoFnTester<InputT, OutputT> {
if (cloningBehavior.equals(CloningBehavior.DO_NOT_CLONE)) {
fn = origFn;
} else {
- fn = (DoFn<InputT, OutputT>)
+ fn = (OldDoFn<InputT, OutputT>)
SerializableUtils.deserializeFromByteArray(
SerializableUtils.serializeToByteArray(origFn),
origFn.toString());
[18/19] incubator-beam git commit: Port DebuggingWordCount example
from OldDoFn to DoFn
Posted by dh...@apache.org.
Port DebuggingWordCount example from OldDoFn to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/49d2f170
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/49d2f170
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/49d2f170
Branch: refs/heads/master
Commit: 49d2f1706f69c5106a9082ffd2fecaf69b2d868c
Parents: ca9e337
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:18 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../java/org/apache/beam/examples/DebuggingWordCount.java | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/49d2f170/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
index 3c43152..c1b273c 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.values.KV;
@@ -106,8 +106,8 @@ import java.util.regex.Pattern;
* overridden with {@code --inputFile}.
*/
public class DebuggingWordCount {
- /** A OldDoFn that filters for a specific key based upon a regular expression. */
- public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
+ /** A DoFn that filters for a specific key based upon a regular expression. */
+ public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
/**
* Concept #1: The logger below uses the fully qualified class name of FilterTextFn
* as the logger. All log statements emitted by this logger will be referenced by this name
@@ -133,7 +133,7 @@ public class DebuggingWordCount {
private final Aggregator<Long, Long> unmatchedWords =
createAggregator("umatchedWords", new Sum.SumLongFn());
- @Override
+ @ProcessElement
public void processElement(ProcessContext c) {
if (filter.matcher(c.element().getKey()).matches()) {
// Log at the "DEBUG" level each element that we match. When executing this pipeline
[03/19] incubator-beam git commit: Rename DoFnWithContext to DoFn
Posted by dh...@apache.org.
Rename DoFnWithContext to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3bcb6f46
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3bcb6f46
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3bcb6f46
Branch: refs/heads/master
Commit: 3bcb6f46ad0ae483d1d8785edc2d9d5846c71a73
Parents: e160966
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:10:01 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:52 2016 -0700
----------------------------------------------------------------------
.../org/apache/beam/sdk/transforms/DoFn.java | 429 +++++++++++++++++++
.../beam/sdk/transforms/DoFnReflector.java | 84 ++--
.../apache/beam/sdk/transforms/DoFnTester.java | 2 +-
.../beam/sdk/transforms/DoFnWithContext.java | 429 -------------------
.../org/apache/beam/sdk/transforms/OldDoFn.java | 2 +-
.../org/apache/beam/sdk/transforms/ParDo.java | 16 +-
.../beam/sdk/transforms/DoFnReflectorTest.java | 86 ++--
.../apache/beam/sdk/transforms/DoFnTest.java | 237 ++++++++++
.../sdk/transforms/DoFnWithContextTest.java | 237 ----------
.../apache/beam/sdk/transforms/ParDoTest.java | 12 +-
.../dofnreflector/DoFnReflectorTestHelper.java | 26 +-
.../transforms/DoFnReflectorBenchmark.java | 30 +-
12 files changed, 795 insertions(+), 795 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
new file mode 100644
index 0000000..eb6753c
--- /dev/null
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -0,0 +1,429 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.transforms.display.HasDisplayData;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowingInternals;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TypeDescriptor;
+
+import org.joda.time.Duration;
+import org.joda.time.Instant;
+
+import java.io.Serializable;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * The argument to {@link ParDo} providing the code to use to process
+ * elements of the input
+ * {@link org.apache.beam.sdk.values.PCollection}.
+ *
+ * <p>See {@link ParDo} for more explanation, examples of use, and
+ * discussion of constraints on {@code DoFn}s, including their
+ * serializability, lack of access to global shared mutable state,
+ * requirements for failure tolerance, and benefits of optimization.
+ *
+ * <p>{@code DoFn}s can be tested in a particular
+ * {@code Pipeline} by running that {@code Pipeline} on sample input
+ * and then checking its output. Unit testing of a {@code DoFn},
+ * separately from any {@code ParDo} transform or {@code Pipeline},
+ * can be done via the {@link DoFnTester} harness.
+ *
+ * <p>Implementations must define a method annotated with {@link ProcessElement}
+ * that satisfies the requirements described there. See the {@link ProcessElement}
+ * for details.
+ *
+ * <p>This functionality is experimental and likely to change.
+ *
+ * <p>Example usage:
+ *
+ * <pre> {@code
+ * PCollection<String> lines = ... ;
+ * PCollection<String> words =
+ * lines.apply(ParDo.of(new DoFn<String, String>() {
+ * @ProcessElement
+ * public void processElement(ProcessContext c, BoundedWindow window) {
+ *
+ * }}));
+ * } </pre>
+ *
+ * @param <InputT> the type of the (main) input elements
+ * @param <OutputT> the type of the (main) output elements
+ */
+@Experimental
+public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayData {
+
+ /** Information accessible to all methods in this {@code DoFn}. */
+ public abstract class Context {
+
+ /**
+ * Returns the {@code PipelineOptions} specified with the
+ * {@link org.apache.beam.sdk.runners.PipelineRunner}
+ * invoking this {@code DoFn}. The {@code PipelineOptions} will
+ * be the default running via {@link DoFnTester}.
+ */
+ public abstract PipelineOptions getPipelineOptions();
+
+ /**
+ * Adds the given element to the main output {@code PCollection}.
+ *
+ * <p>Once passed to {@code output} the element should not be modified in
+ * any way.
+ *
+ * <p>If invoked from {@link ProcessElement}, the output
+ * element will have the same timestamp and be in the same windows
+ * as the input element passed to the method annotated with
+ * {@code @ProcessElement}.
+ *
+ * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element. The output element
+ * will have a timestamp of negative infinity.
+ */
+ public abstract void output(OutputT output);
+
+ /**
+ * Adds the given element to the main output {@code PCollection},
+ * with the given timestamp.
+ *
+ * <p>Once passed to {@code outputWithTimestamp} the element should not be
+ * modified in any way.
+ *
+ * <p>If invoked from {@link ProcessElement}), the timestamp
+ * must not be older than the input element's timestamp minus
+ * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
+ * be in the same windows as the input element.
+ *
+ * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element except for the
+ * timestamp.
+ */
+ public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
+
+ /**
+ * Adds the given element to the side output {@code PCollection} with the
+ * given tag.
+ *
+ * <p>Once passed to {@code sideOutput} the element should not be modified
+ * in any way.
+ *
+ * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags} to
+ * specify the tags of side outputs that it consumes. Non-consumed side
+ * outputs, e.g., outputs for monitoring purposes only, don't necessarily
+ * need to be specified.
+ *
+ * <p>The output element will have the same timestamp and be in the same
+ * windows as the input element passed to {@link ProcessElement}).
+ *
+ * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element. The output element
+ * will have a timestamp of negative infinity.
+ *
+ * @see ParDo#withOutputTags
+ */
+ public abstract <T> void sideOutput(TupleTag<T> tag, T output);
+
+ /**
+ * Adds the given element to the specified side output {@code PCollection},
+ * with the given timestamp.
+ *
+ * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
+ * modified in any way.
+ *
+ * <p>If invoked from {@link ProcessElement}), the timestamp
+ * must not be older than the input element's timestamp minus
+ * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
+ * be in the same windows as the input element.
+ *
+ * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element except for the
+ * timestamp.
+ *
+ * @see ParDo#withOutputTags
+ */
+ public abstract <T> void sideOutputWithTimestamp(
+ TupleTag<T> tag, T output, Instant timestamp);
+ }
+
+ /**
+ * Information accessible when running {@link OldDoFn#processElement}.
+ */
+ public abstract class ProcessContext extends Context {
+
+ /**
+ * Returns the input element to be processed.
+ *
+ * <p>The element will not be changed -- it is safe to cache, etc.
+ * without copying.
+ */
+ public abstract InputT element();
+
+
+ /**
+ * Returns the value of the side input.
+ *
+ * @throws IllegalArgumentException if this is not a side input
+ * @see ParDo#withSideInputs
+ */
+ public abstract <T> T sideInput(PCollectionView<T> view);
+
+ /**
+ * Returns the timestamp of the input element.
+ *
+ * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
+ * for more information.
+ */
+ public abstract Instant timestamp();
+
+ /**
+ * Returns information about the pane within this window into which the
+ * input element has been assigned.
+ *
+ * <p>Generally all data is in a single, uninteresting pane unless custom
+ * triggering and/or late data has been explicitly requested.
+ * See {@link org.apache.beam.sdk.transforms.windowing.Window}
+ * for more information.
+ */
+ public abstract PaneInfo pane();
+ }
+
+ /**
+ * Returns the allowed timestamp skew duration, which is the maximum
+ * duration that timestamps can be shifted backward in
+ * {@link DoFn.Context#outputWithTimestamp}.
+ *
+ * <p>The default value is {@code Duration.ZERO}, in which case
+ * timestamps can only be shifted forward to future. For infinite
+ * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
+ */
+ public Duration getAllowedTimestampSkew() {
+ return Duration.ZERO;
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+
+ Map<String, DelegatingAggregator<?, ?>> aggregators = new HashMap<>();
+
+ /**
+ * Protects aggregators from being created after initialization.
+ */
+ private boolean aggregatorsAreFinal;
+
+ /**
+ * Returns a {@link TypeDescriptor} capturing what is known statically
+ * about the input type of this {@code DoFn} instance's most-derived
+ * class.
+ *
+ * <p>See {@link #getOutputTypeDescriptor} for more discussion.
+ */
+ protected TypeDescriptor<InputT> getInputTypeDescriptor() {
+ return new TypeDescriptor<InputT>(getClass()) {};
+ }
+
+ /**
+ * Returns a {@link TypeDescriptor} capturing what is known statically
+ * about the output type of this {@code DoFn} instance's
+ * most-derived class.
+ *
+ * <p>In the normal case of a concrete {@code DoFn} subclass with
+ * no generic type parameters of its own (including anonymous inner
+ * classes), this will be a complete non-generic type, which is good
+ * for choosing a default output {@code Coder<O>} for the output
+ * {@code PCollection<O>}.
+ */
+ protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+ return new TypeDescriptor<OutputT>(getClass()) {};
+ }
+
+ /**
+ * Interface for runner implementors to provide implementations of extra context information.
+ *
+ * <p>The methods on this interface are called by {@link DoFnReflector} before invoking an
+ * annotated {@link StartBundle}, {@link ProcessElement} or {@link FinishBundle} method that
+ * has indicated it needs the given extra context.
+ *
+ * <p>In the case of {@link ProcessElement} it is called once per invocation of
+ * {@link ProcessElement}.
+ */
+ public interface ExtraContextFactory<InputT, OutputT> {
+ /**
+ * Construct the {@link BoundedWindow} to use within a {@link DoFn} that
+ * needs it. This is called if the {@link ProcessElement} method has a parameter of type
+ * {@link BoundedWindow}.
+ *
+ * @return {@link BoundedWindow} of the element currently being processed.
+ */
+ BoundedWindow window();
+
+ /**
+ * Construct the {@link WindowingInternals} to use within a {@link DoFn} that
+ * needs it. This is called if the {@link ProcessElement} method has a parameter of type
+ * {@link WindowingInternals}.
+ */
+ WindowingInternals<InputT, OutputT> windowingInternals();
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+
+ /**
+ * Annotation for the method to use to prepare an instance for processing a batch of elements.
+ * The method annotated with this must satisfy the following constraints:
+ * <ul>
+ * <li>It must have at least one argument.
+ * <li>Its first (and only) argument must be a {@link DoFn.Context}.
+ * </ul>
+ */
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target(ElementType.METHOD)
+ public @interface StartBundle {}
+
+ /**
+ * Annotation for the method to use for processing elements. A subclass of
+ * {@link DoFn} must have a method with this annotation satisfying
+ * the following constraints in order for it to be executable:
+ * <ul>
+ * <li>It must have at least one argument.
+ * <li>Its first argument must be a {@link DoFn.ProcessContext}.
+ * <li>Its remaining arguments must be {@link BoundedWindow}, or
+ * {@link WindowingInternals WindowingInternals<InputT, OutputT>}.
+ * </ul>
+ */
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target(ElementType.METHOD)
+ public @interface ProcessElement {}
+
+ /**
+ * Annotation for the method to use to prepare an instance for processing a batch of elements.
+ * The method annotated with this must satisfy the following constraints:
+ * <ul>
+ * <li>It must have at least one argument.
+ * <li>Its first (and only) argument must be a {@link DoFn.Context}.
+ * </ul>
+ */
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target(ElementType.METHOD)
+ public @interface FinishBundle {}
+
+ /**
+ * Returns an {@link Aggregator} with aggregation logic specified by the
+ * {@link CombineFn} argument. The name provided must be unique across
+ * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
+ * during pipeline construction.
+ *
+ * @param name the name of the aggregator
+ * @param combiner the {@link CombineFn} to use in the aggregator
+ * @return an aggregator for the provided name and combiner in the scope of
+ * this OldDoFn
+ * @throws NullPointerException if the name or combiner is null
+ * @throws IllegalArgumentException if the given name collides with another
+ * aggregator in this scope
+ * @throws IllegalStateException if called during pipeline execution.
+ */
+ public final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+ createAggregator(String name, Combine.CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
+ checkNotNull(name, "name cannot be null");
+ checkNotNull(combiner, "combiner cannot be null");
+ checkArgument(!aggregators.containsKey(name),
+ "Cannot create aggregator with name %s."
+ + " An Aggregator with that name already exists within this scope.",
+ name);
+ checkState(!aggregatorsAreFinal,
+ "Cannot create an aggregator during pipeline execution."
+ + " Aggregators should be registered during pipeline construction.");
+
+ DelegatingAggregator<AggInputT, AggOutputT> aggregator =
+ new DelegatingAggregator<>(name, combiner);
+ aggregators.put(name, aggregator);
+ return aggregator;
+ }
+
+ /**
+ * Returns an {@link Aggregator} with the aggregation logic specified by the
+ * {@link SerializableFunction} argument. The name provided must be unique
+ * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
+ * created during pipeline construction.
+ *
+ * @param name the name of the aggregator
+ * @param combiner the {@link SerializableFunction} to use in the aggregator
+ * @return an aggregator for the provided name and combiner in the scope of
+ * this OldDoFn
+ * @throws NullPointerException if the name or combiner is null
+ * @throws IllegalArgumentException if the given name collides with another
+ * aggregator in this scope
+ * @throws IllegalStateException if called during pipeline execution.
+ */
+ public final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(
+ String name, SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
+ checkNotNull(combiner, "combiner cannot be null.");
+ return createAggregator(name, Combine.IterableCombineFn.of(combiner));
+ }
+
+ /**
+ * Finalize the {@link DoFn} construction to prepare for processing.
+ * This method should be called by runners before any processing methods.
+ */
+ public void prepareForProcessing() {
+ aggregatorsAreFinal = true;
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * <p>By default, does not register any display data. Implementors may override this method
+ * to provide their own display data.
+ */
+ @Override
+ public void populateDisplayData(DisplayData.Builder builder) {
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
index d8d4181..b504cb4 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnReflector.java
@@ -18,10 +18,10 @@
package org.apache.beam.sdk.transforms;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ExtraContextFactory;
-import org.apache.beam.sdk.transforms.DoFnWithContext.FinishBundle;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ProcessElement;
-import org.apache.beam.sdk.transforms.DoFnWithContext.StartBundle;
+import org.apache.beam.sdk.transforms.DoFn.ExtraContextFactory;
+import org.apache.beam.sdk.transforms.DoFn.FinishBundle;
+import org.apache.beam.sdk.transforms.DoFn.ProcessElement;
+import org.apache.beam.sdk.transforms.DoFn.StartBundle;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -94,7 +94,7 @@ import javax.annotation.Nullable;
/**
- * Utility implementing the necessary reflection for working with {@link DoFnWithContext}s.
+ * Utility implementing the necessary reflection for working with {@link DoFn}s.
*/
public abstract class DoFnReflector {
@@ -109,7 +109,7 @@ public abstract class DoFnReflector {
/**
* Enumeration of the parameters available from the {@link ExtraContextFactory} to use as
- * additional parameters for {@link DoFnWithContext} methods.
+ * additional parameters for {@link DoFn} methods.
* <p>
* We don't rely on looking for properly annotated methods within {@link ExtraContextFactory}
* because erasure would make it impossible to completely fill in the type token for context
@@ -139,7 +139,7 @@ public abstract class DoFnReflector {
/**
* Create a type token representing the given parameter. May use the type token associated
- * with the input and output types of the {@link DoFnWithContext}, depending on the extra
+ * with the input and output types of the {@link DoFn}, depending on the extra
* context.
*/
abstract <InputT, OutputT> TypeToken<?> tokenFor(
@@ -190,22 +190,22 @@ public abstract class DoFnReflector {
}
/**
- * @return true if the reflected {@link DoFnWithContext} uses a Single Window.
+ * @return true if the reflected {@link DoFn} uses a Single Window.
*/
public abstract boolean usesSingleWindow();
/** Create an {@link DoFnInvoker} bound to the given {@link OldDoFn}. */
public abstract <InputT, OutputT> DoFnInvoker<InputT, OutputT> bindInvoker(
- DoFnWithContext<InputT, OutputT> fn);
+ DoFn<InputT, OutputT> fn);
private static final Map<Class<?>, DoFnReflector> REFLECTOR_CACHE =
new LinkedHashMap<Class<?>, DoFnReflector>();
/**
- * @return the {@link DoFnReflector} for the given {@link DoFnWithContext}.
+ * @return the {@link DoFnReflector} for the given {@link DoFn}.
*/
public static DoFnReflector of(
- @SuppressWarnings("rawtypes") Class<? extends DoFnWithContext> fn) {
+ @SuppressWarnings("rawtypes") Class<? extends DoFn> fn) {
DoFnReflector reflector = REFLECTOR_CACHE.get(fn);
if (reflector != null) {
return reflector;
@@ -217,9 +217,9 @@ public abstract class DoFnReflector {
}
/**
- * Create a {@link OldDoFn} that the {@link DoFnWithContext}.
+ * Create a {@link OldDoFn} that the {@link DoFn}.
*/
- public <InputT, OutputT> OldDoFn<InputT, OutputT> toDoFn(DoFnWithContext<InputT, OutputT> fn) {
+ public <InputT, OutputT> OldDoFn<InputT, OutputT> toDoFn(DoFn<InputT, OutputT> fn) {
if (usesSingleWindow()) {
return new WindowDoFnAdapter<InputT, OutputT>(this, fn);
} else {
@@ -259,7 +259,7 @@ public abstract class DoFnReflector {
static <InputT, OutputT> List<AdditionalParameter> verifyProcessMethodArguments(Method m) {
return verifyMethodArguments(m,
EXTRA_PROCESS_CONTEXTS,
- new TypeToken<DoFnWithContext<InputT, OutputT>.ProcessContext>() {},
+ new TypeToken<DoFn<InputT, OutputT>.ProcessContext>() {},
new TypeParameter<InputT>() {},
new TypeParameter<OutputT>() {});
}
@@ -271,13 +271,13 @@ public abstract class DoFnReflector {
}
return verifyMethodArguments(m,
EXTRA_CONTEXTS,
- new TypeToken<DoFnWithContext<InputT, OutputT>.Context>() {},
+ new TypeToken<DoFn<InputT, OutputT>.Context>() {},
new TypeParameter<InputT>() {},
new TypeParameter<OutputT>() {});
}
/**
- * Verify the method arguments for a given {@link DoFnWithContext} method.
+ * Verify the method arguments for a given {@link DoFn} method.
*
* <p>The requirements for a method to be valid, are:
* <ol>
@@ -330,7 +330,7 @@ public abstract class DoFnReflector {
// Fill in the generics in the allExtraContextArgs interface from the types in the
// Context or ProcessContext OldDoFn.
ParameterizedType pt = (ParameterizedType) contextToken.getType();
- // We actually want the owner, since ProcessContext and Context are owned by DoFnWithContext.
+ // We actually want the owner, since ProcessContext and Context are owned by DoFn.
pt = (ParameterizedType) pt.getOwnerType();
@SuppressWarnings("unchecked")
TypeToken<InputT> iActual = (TypeToken<InputT>) TypeToken.of(pt.getActualTypeArguments()[0]);
@@ -368,21 +368,21 @@ public abstract class DoFnReflector {
public interface DoFnInvoker<InputT, OutputT> {
/** Invoke {@link OldDoFn#startBundle} on the bound {@code OldDoFn}. */
void invokeStartBundle(
- DoFnWithContext<InputT, OutputT>.Context c,
+ DoFn<InputT, OutputT>.Context c,
ExtraContextFactory<InputT, OutputT> extra);
/** Invoke {@link OldDoFn#finishBundle} on the bound {@code OldDoFn}. */
void invokeFinishBundle(
- DoFnWithContext<InputT, OutputT>.Context c,
+ DoFn<InputT, OutputT>.Context c,
ExtraContextFactory<InputT, OutputT> extra);
/** Invoke {@link OldDoFn#processElement} on the bound {@code OldDoFn}. */
public void invokeProcessElement(
- DoFnWithContext<InputT, OutputT>.ProcessContext c,
+ DoFn<InputT, OutputT>.ProcessContext c,
ExtraContextFactory<InputT, OutputT> extra);
}
/**
- * Implementation of {@link DoFnReflector} for the arbitrary {@link DoFnWithContext}.
+ * Implementation of {@link DoFnReflector} for the arbitrary {@link DoFn}.
*/
private static class GenericDoFnReflector extends DoFnReflector {
@@ -395,7 +395,7 @@ public abstract class DoFnReflector {
private final Constructor<?> constructor;
private GenericDoFnReflector(
- @SuppressWarnings("rawtypes") Class<? extends DoFnWithContext> fn) {
+ @SuppressWarnings("rawtypes") Class<? extends DoFn> fn) {
// Locate the annotated methods
this.processElement = findAnnotatedMethod(ProcessElement.class, fn, true);
this.startBundle = findAnnotatedMethod(StartBundle.class, fn, false);
@@ -442,7 +442,7 @@ public abstract class DoFnReflector {
private static Method findAnnotatedMethod(
Class<? extends Annotation> anno, Class<?> fnClazz, boolean required) {
Collection<Method> matches = declaredMethodsWithAnnotation(
- anno, fnClazz, DoFnWithContext.class);
+ anno, fnClazz, DoFn.class);
if (matches.size() == 0) {
if (required == true) {
@@ -493,12 +493,12 @@ public abstract class DoFnReflector {
/**
* Use ByteBuddy to generate the code for a {@link DoFnInvoker} that invokes the given
- * {@link DoFnWithContext}.
+ * {@link DoFn}.
* @param clazz
* @return
*/
private Constructor<? extends DoFnInvoker<?, ?>> createInvokerConstructor(
- @SuppressWarnings("rawtypes") Class<? extends DoFnWithContext> clazz) {
+ @SuppressWarnings("rawtypes") Class<? extends DoFn> clazz) {
final TypeDescription clazzDescription = new TypeDescription.ForLoadedType(clazz);
@@ -545,7 +545,7 @@ public abstract class DoFnReflector {
@Override
public <InputT, OutputT> DoFnInvoker<InputT, OutputT> bindInvoker(
- DoFnWithContext<InputT, OutputT> fn) {
+ DoFn<InputT, OutputT> fn) {
try {
@SuppressWarnings("unchecked")
DoFnInvoker<InputT, OutputT> invoker =
@@ -562,13 +562,13 @@ public abstract class DoFnReflector {
}
private static class ContextAdapter<InputT, OutputT>
- extends DoFnWithContext<InputT, OutputT>.Context
- implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
+ extends DoFn<InputT, OutputT>.Context
+ implements DoFn.ExtraContextFactory<InputT, OutputT> {
private OldDoFn<InputT, OutputT>.Context context;
private ContextAdapter(
- DoFnWithContext<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.Context context) {
+ DoFn<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.Context context) {
fn.super();
this.context = context;
}
@@ -600,14 +600,14 @@ public abstract class DoFnReflector {
@Override
public BoundedWindow window() {
- // The DoFnWithContext doesn't allow us to ask for these outside ProcessElements, so this
+ // The DoFn doesn't allow us to ask for these outside ProcessElements, so this
// should be unreachable.
throw new UnsupportedOperationException("Can only get the window in ProcessElements");
}
@Override
public WindowingInternals<InputT, OutputT> windowingInternals() {
- // The DoFnWithContext doesn't allow us to ask for these outside ProcessElements, so this
+ // The DoFn doesn't allow us to ask for these outside ProcessElements, so this
// should be unreachable.
throw new UnsupportedOperationException(
"Can only get the windowingInternals in ProcessElements");
@@ -615,13 +615,13 @@ public abstract class DoFnReflector {
}
private static class ProcessContextAdapter<InputT, OutputT>
- extends DoFnWithContext<InputT, OutputT>.ProcessContext
- implements DoFnWithContext.ExtraContextFactory<InputT, OutputT> {
+ extends DoFn<InputT, OutputT>.ProcessContext
+ implements DoFn.ExtraContextFactory<InputT, OutputT> {
private OldDoFn<InputT, OutputT>.ProcessContext context;
private ProcessContextAdapter(
- DoFnWithContext<InputT, OutputT> fn,
+ DoFn<InputT, OutputT> fn,
OldDoFn<InputT, OutputT>.ProcessContext context) {
fn.super();
this.context = context;
@@ -693,10 +693,10 @@ public abstract class DoFnReflector {
private static class SimpleDoFnAdapter<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
- private final DoFnWithContext<InputT, OutputT> fn;
+ private final DoFn<InputT, OutputT> fn;
private transient DoFnInvoker<InputT, OutputT> invoker;
- private SimpleDoFnAdapter(DoFnReflector reflector, DoFnWithContext<InputT, OutputT> fn) {
+ private SimpleDoFnAdapter(DoFnReflector reflector, DoFn<InputT, OutputT> fn) {
super(fn.aggregators);
this.fn = fn;
this.invoker = reflector.bindInvoker(fn);
@@ -745,7 +745,7 @@ public abstract class DoFnReflector {
private static class WindowDoFnAdapter<InputT, OutputT>
extends SimpleDoFnAdapter<InputT, OutputT> implements OldDoFn.RequiresWindowAccess {
- private WindowDoFnAdapter(DoFnReflector reflector, DoFnWithContext<InputT, OutputT> fn) {
+ private WindowDoFnAdapter(DoFnReflector reflector, DoFn<InputT, OutputT> fn) {
super(reflector, fn);
}
}
@@ -770,7 +770,7 @@ public abstract class DoFnReflector {
try {
prepareMethod = new MethodLocator.ForExplicitMethod(
new MethodDescription.ForLoadedMethod(
- DoFnWithContext.class.getDeclaredMethod("prepareForProcessing")))
+ DoFn.class.getDeclaredMethod("prepareForProcessing")))
.resolve(instrumentedMethod);
} catch (NoSuchMethodException | SecurityException e) {
throw new RuntimeException("Unable to locate prepareForProcessing method", e);
@@ -817,7 +817,7 @@ public abstract class DoFnReflector {
/**
* A byte-buddy {@link Implementation} that delegates a call that receives
- * {@link AdditionalParameter} to the given {@link DoFnWithContext} method.
+ * {@link AdditionalParameter} to the given {@link DoFn} method.
*/
private static final class InvokerDelegation implements Implementation {
@Nullable
@@ -845,7 +845,7 @@ public abstract class DoFnReflector {
/**
* Generate the {@link Implementation} of one of the life-cycle methods of a
- * {@link DoFnWithContext}.
+ * {@link DoFn}.
*/
private static Implementation create(
@Nullable final Method target, BeforeDelegation before, List<AdditionalParameter> args) {
@@ -869,7 +869,7 @@ public abstract class DoFnReflector {
}
/**
- * Stack manipulation to push the {@link DoFnWithContext} reference stored in the
+ * Stack manipulation to push the {@link DoFn} reference stored in the
* delegate field of the invoker on to the top of the stack.
*
* <p>This implementation is derived from the code for
@@ -1018,7 +1018,7 @@ public abstract class DoFnReflector {
/**
* A constructor {@link Implementation} for a {@link DoFnInvoker class}. Produces the byte code
* for a constructor that takes a single argument and assigns it to the delegate field.
- * {@link AdditionalParameter} to the given {@link DoFnWithContext} method.
+ * {@link AdditionalParameter} to the given {@link DoFn} method.
*/
private static final class InvokerConstructor implements Implementation {
@Override
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
index 9336e4c..f44a9ae 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
@@ -90,7 +90,7 @@ public class DoFnTester<InputT, OutputT> {
*/
@SuppressWarnings("unchecked")
public static <InputT, OutputT> DoFnTester<InputT, OutputT>
- of(DoFnWithContext<InputT, OutputT> fn) {
+ of(DoFn<InputT, OutputT> fn) {
return new DoFnTester<InputT, OutputT>(DoFnReflector.of(fn.getClass()).toDoFn(fn));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
deleted file mode 100644
index b27163a..0000000
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
+++ /dev/null
@@ -1,429 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-import org.apache.beam.sdk.transforms.display.HasDisplayData;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowingInternals;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.sdk.values.TypeDescriptor;
-
-import org.joda.time.Duration;
-import org.joda.time.Instant;
-
-import java.io.Serializable;
-import java.lang.annotation.Documented;
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * The argument to {@link ParDo} providing the code to use to process
- * elements of the input
- * {@link org.apache.beam.sdk.values.PCollection}.
- *
- * <p>See {@link ParDo} for more explanation, examples of use, and
- * discussion of constraints on {@code DoFnWithContext}s, including their
- * serializability, lack of access to global shared mutable state,
- * requirements for failure tolerance, and benefits of optimization.
- *
- * <p>{@code DoFnWithContext}s can be tested in a particular
- * {@code Pipeline} by running that {@code Pipeline} on sample input
- * and then checking its output. Unit testing of a {@code DoFnWithContext},
- * separately from any {@code ParDo} transform or {@code Pipeline},
- * can be done via the {@link DoFnTester} harness.
- *
- * <p>Implementations must define a method annotated with {@link ProcessElement}
- * that satisfies the requirements described there. See the {@link ProcessElement}
- * for details.
- *
- * <p>This functionality is experimental and likely to change.
- *
- * <p>Example usage:
- *
- * <pre> {@code
- * PCollection<String> lines = ... ;
- * PCollection<String> words =
- * lines.apply(ParDo.of(new DoFnWithContext<String, String>() {
- * @ProcessElement
- * public void processElement(ProcessContext c, BoundedWindow window) {
- *
- * }}));
- * } </pre>
- *
- * @param <InputT> the type of the (main) input elements
- * @param <OutputT> the type of the (main) output elements
- */
-@Experimental
-public abstract class DoFnWithContext<InputT, OutputT> implements Serializable, HasDisplayData {
-
- /** Information accessible to all methods in this {@code DoFnWithContext}. */
- public abstract class Context {
-
- /**
- * Returns the {@code PipelineOptions} specified with the
- * {@link org.apache.beam.sdk.runners.PipelineRunner}
- * invoking this {@code DoFnWithContext}. The {@code PipelineOptions} will
- * be the default running via {@link DoFnTester}.
- */
- public abstract PipelineOptions getPipelineOptions();
-
- /**
- * Adds the given element to the main output {@code PCollection}.
- *
- * <p>Once passed to {@code output} the element should not be modified in
- * any way.
- *
- * <p>If invoked from {@link ProcessElement}, the output
- * element will have the same timestamp and be in the same windows
- * as the input element passed to the method annotated with
- * {@code @ProcessElement}.
- *
- * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element. The output element
- * will have a timestamp of negative infinity.
- */
- public abstract void output(OutputT output);
-
- /**
- * Adds the given element to the main output {@code PCollection},
- * with the given timestamp.
- *
- * <p>Once passed to {@code outputWithTimestamp} the element should not be
- * modified in any way.
- *
- * <p>If invoked from {@link ProcessElement}), the timestamp
- * must not be older than the input element's timestamp minus
- * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
- * be in the same windows as the input element.
- *
- * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element except for the
- * timestamp.
- */
- public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
-
- /**
- * Adds the given element to the side output {@code PCollection} with the
- * given tag.
- *
- * <p>Once passed to {@code sideOutput} the element should not be modified
- * in any way.
- *
- * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags} to
- * specify the tags of side outputs that it consumes. Non-consumed side
- * outputs, e.g., outputs for monitoring purposes only, don't necessarily
- * need to be specified.
- *
- * <p>The output element will have the same timestamp and be in the same
- * windows as the input element passed to {@link ProcessElement}).
- *
- * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element. The output element
- * will have a timestamp of negative infinity.
- *
- * @see ParDo#withOutputTags
- */
- public abstract <T> void sideOutput(TupleTag<T> tag, T output);
-
- /**
- * Adds the given element to the specified side output {@code PCollection},
- * with the given timestamp.
- *
- * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
- * modified in any way.
- *
- * <p>If invoked from {@link ProcessElement}), the timestamp
- * must not be older than the input element's timestamp minus
- * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
- * be in the same windows as the input element.
- *
- * <p>If invoked from {@link StartBundle} or {@link FinishBundle},
- * this will attempt to use the
- * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
- * of the input {@code PCollection} to determine what windows the element
- * should be in, throwing an exception if the {@code WindowFn} attempts
- * to access any information about the input element except for the
- * timestamp.
- *
- * @see ParDo#withOutputTags
- */
- public abstract <T> void sideOutputWithTimestamp(
- TupleTag<T> tag, T output, Instant timestamp);
- }
-
- /**
- * Information accessible when running {@link OldDoFn#processElement}.
- */
- public abstract class ProcessContext extends Context {
-
- /**
- * Returns the input element to be processed.
- *
- * <p>The element will not be changed -- it is safe to cache, etc.
- * without copying.
- */
- public abstract InputT element();
-
-
- /**
- * Returns the value of the side input.
- *
- * @throws IllegalArgumentException if this is not a side input
- * @see ParDo#withSideInputs
- */
- public abstract <T> T sideInput(PCollectionView<T> view);
-
- /**
- * Returns the timestamp of the input element.
- *
- * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
- * for more information.
- */
- public abstract Instant timestamp();
-
- /**
- * Returns information about the pane within this window into which the
- * input element has been assigned.
- *
- * <p>Generally all data is in a single, uninteresting pane unless custom
- * triggering and/or late data has been explicitly requested.
- * See {@link org.apache.beam.sdk.transforms.windowing.Window}
- * for more information.
- */
- public abstract PaneInfo pane();
- }
-
- /**
- * Returns the allowed timestamp skew duration, which is the maximum
- * duration that timestamps can be shifted backward in
- * {@link DoFnWithContext.Context#outputWithTimestamp}.
- *
- * <p>The default value is {@code Duration.ZERO}, in which case
- * timestamps can only be shifted forward to future. For infinite
- * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
- */
- public Duration getAllowedTimestampSkew() {
- return Duration.ZERO;
- }
-
- /////////////////////////////////////////////////////////////////////////////
-
- Map<String, DelegatingAggregator<?, ?>> aggregators = new HashMap<>();
-
- /**
- * Protects aggregators from being created after initialization.
- */
- private boolean aggregatorsAreFinal;
-
- /**
- * Returns a {@link TypeDescriptor} capturing what is known statically
- * about the input type of this {@code DoFnWithContext} instance's most-derived
- * class.
- *
- * <p>See {@link #getOutputTypeDescriptor} for more discussion.
- */
- protected TypeDescriptor<InputT> getInputTypeDescriptor() {
- return new TypeDescriptor<InputT>(getClass()) {};
- }
-
- /**
- * Returns a {@link TypeDescriptor} capturing what is known statically
- * about the output type of this {@code DoFnWithContext} instance's
- * most-derived class.
- *
- * <p>In the normal case of a concrete {@code DoFnWithContext} subclass with
- * no generic type parameters of its own (including anonymous inner
- * classes), this will be a complete non-generic type, which is good
- * for choosing a default output {@code Coder<O>} for the output
- * {@code PCollection<O>}.
- */
- protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
- return new TypeDescriptor<OutputT>(getClass()) {};
- }
-
- /**
- * Interface for runner implementors to provide implementations of extra context information.
- *
- * <p>The methods on this interface are called by {@link DoFnReflector} before invoking an
- * annotated {@link StartBundle}, {@link ProcessElement} or {@link FinishBundle} method that
- * has indicated it needs the given extra context.
- *
- * <p>In the case of {@link ProcessElement} it is called once per invocation of
- * {@link ProcessElement}.
- */
- public interface ExtraContextFactory<InputT, OutputT> {
- /**
- * Construct the {@link BoundedWindow} to use within a {@link DoFnWithContext} that
- * needs it. This is called if the {@link ProcessElement} method has a parameter of type
- * {@link BoundedWindow}.
- *
- * @return {@link BoundedWindow} of the element currently being processed.
- */
- BoundedWindow window();
-
- /**
- * Construct the {@link WindowingInternals} to use within a {@link DoFnWithContext} that
- * needs it. This is called if the {@link ProcessElement} method has a parameter of type
- * {@link WindowingInternals}.
- */
- WindowingInternals<InputT, OutputT> windowingInternals();
- }
-
- /////////////////////////////////////////////////////////////////////////////
-
- /**
- * Annotation for the method to use to prepare an instance for processing a batch of elements.
- * The method annotated with this must satisfy the following constraints:
- * <ul>
- * <li>It must have at least one argument.
- * <li>Its first (and only) argument must be a {@link DoFnWithContext.Context}.
- * </ul>
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(ElementType.METHOD)
- public @interface StartBundle {}
-
- /**
- * Annotation for the method to use for processing elements. A subclass of
- * {@link DoFnWithContext} must have a method with this annotation satisfying
- * the following constraints in order for it to be executable:
- * <ul>
- * <li>It must have at least one argument.
- * <li>Its first argument must be a {@link DoFnWithContext.ProcessContext}.
- * <li>Its remaining arguments must be {@link BoundedWindow}, or
- * {@link WindowingInternals WindowingInternals<InputT, OutputT>}.
- * </ul>
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(ElementType.METHOD)
- public @interface ProcessElement {}
-
- /**
- * Annotation for the method to use to prepare an instance for processing a batch of elements.
- * The method annotated with this must satisfy the following constraints:
- * <ul>
- * <li>It must have at least one argument.
- * <li>Its first (and only) argument must be a {@link DoFnWithContext.Context}.
- * </ul>
- */
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- @Target(ElementType.METHOD)
- public @interface FinishBundle {}
-
- /**
- * Returns an {@link Aggregator} with aggregation logic specified by the
- * {@link CombineFn} argument. The name provided must be unique across
- * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
- * during pipeline construction.
- *
- * @param name the name of the aggregator
- * @param combiner the {@link CombineFn} to use in the aggregator
- * @return an aggregator for the provided name and combiner in the scope of
- * this OldDoFn
- * @throws NullPointerException if the name or combiner is null
- * @throws IllegalArgumentException if the given name collides with another
- * aggregator in this scope
- * @throws IllegalStateException if called during pipeline execution.
- */
- public final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
- createAggregator(String name, Combine.CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
- checkNotNull(name, "name cannot be null");
- checkNotNull(combiner, "combiner cannot be null");
- checkArgument(!aggregators.containsKey(name),
- "Cannot create aggregator with name %s."
- + " An Aggregator with that name already exists within this scope.",
- name);
- checkState(!aggregatorsAreFinal,
- "Cannot create an aggregator during pipeline execution."
- + " Aggregators should be registered during pipeline construction.");
-
- DelegatingAggregator<AggInputT, AggOutputT> aggregator =
- new DelegatingAggregator<>(name, combiner);
- aggregators.put(name, aggregator);
- return aggregator;
- }
-
- /**
- * Returns an {@link Aggregator} with the aggregation logic specified by the
- * {@link SerializableFunction} argument. The name provided must be unique
- * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
- * created during pipeline construction.
- *
- * @param name the name of the aggregator
- * @param combiner the {@link SerializableFunction} to use in the aggregator
- * @return an aggregator for the provided name and combiner in the scope of
- * this OldDoFn
- * @throws NullPointerException if the name or combiner is null
- * @throws IllegalArgumentException if the given name collides with another
- * aggregator in this scope
- * @throws IllegalStateException if called during pipeline execution.
- */
- public final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(
- String name, SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
- checkNotNull(combiner, "combiner cannot be null.");
- return createAggregator(name, Combine.IterableCombineFn.of(combiner));
- }
-
- /**
- * Finalize the {@link DoFnWithContext} construction to prepare for processing.
- * This method should be called by runners before any processing methods.
- */
- void prepareForProcessing() {
- aggregatorsAreFinal = true;
- }
-
- /**
- * {@inheritDoc}
- *
- * <p>By default, does not register any display data. Implementors may override this method
- * to provide their own display data.
- */
- @Override
- public void populateDisplayData(DisplayData.Builder builder) {
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
index 48c6033..f640442 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
@@ -63,7 +63,7 @@ import java.util.UUID;
* separately from any {@code ParDo} transform or {@code Pipeline},
* can be done via the {@link DoFnTester} harness.
*
- * <p>{@link DoFnWithContext} (currently experimental) offers an alternative
+ * <p>{@link DoFn} (currently experimental) offers an alternative
* mechanism for accessing {@link ProcessContext#window()} without the need
* to implement {@link RequiresWindowAccess}.
*
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index 36d8101..bb1af9c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -527,13 +527,13 @@ public class ParDo {
}
private static <InputT, OutputT> OldDoFn<InputT, OutputT>
- adapt(DoFnWithContext<InputT, OutputT> fn) {
+ adapt(DoFn<InputT, OutputT> fn) {
return DoFnReflector.of(fn.getClass()).toDoFn(fn);
}
/**
* Creates a {@link ParDo} {@link PTransform} that will invoke the
- * given {@link DoFnWithContext} function.
+ * given {@link DoFn} function.
*
* <p>The resulting {@link PTransform PTransform's} types have been bound, with the
* input being a {@code PCollection<InputT>} and the output a
@@ -541,11 +541,11 @@ public class ParDo {
* {@code OldDoFn<InputT, OutputT>}. It is ready to be applied, or further
* properties can be set on it first.
*
- * <p>{@link DoFnWithContext} is an experimental alternative to
+ * <p>{@link DoFn} is an experimental alternative to
* {@link OldDoFn} which simplifies accessing the window of the element.
*/
@Experimental
- public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFnWithContext<InputT, OutputT> fn) {
+ public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
return of(adapt(fn), fn.getClass());
}
@@ -633,13 +633,13 @@ public class ParDo {
/**
* Returns a new {@link ParDo} {@link PTransform} that's like this
- * transform but which will invoke the given {@link DoFnWithContext}
+ * transform but which will invoke the given {@link DoFn}
* function, and which has its input and output types bound. Does
* not modify this transform. The resulting {@link PTransform} is
* sufficiently specified to be applied, but more properties can
* still be specified.
*/
- public <InputT, OutputT> Bound<InputT, OutputT> of(DoFnWithContext<InputT, OutputT> fn) {
+ public <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
return of(adapt(fn), fn.getClass());
}
}
@@ -845,12 +845,12 @@ public class ParDo {
/**
* Returns a new multi-output {@link ParDo} {@link PTransform}
* that's like this transform but which will invoke the given
- * {@link DoFnWithContext} function, and which has its input type bound.
+ * {@link DoFn} function, and which has its input type bound.
* Does not modify this transform. The resulting
* {@link PTransform} is sufficiently specified to be applied, but
* more properties can still be specified.
*/
- public <InputT> BoundMulti<InputT, OutputT> of(DoFnWithContext<InputT, OutputT> fn) {
+ public <InputT> BoundMulti<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
return of(adapt(fn), fn.getClass());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
index 0cb3d7b..df9e441 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
@@ -21,10 +21,10 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
-import org.apache.beam.sdk.transforms.DoFnWithContext.Context;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ExtraContextFactory;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ProcessContext;
-import org.apache.beam.sdk.transforms.DoFnWithContext.ProcessElement;
+import org.apache.beam.sdk.transforms.DoFn.Context;
+import org.apache.beam.sdk.transforms.DoFn.ExtraContextFactory;
+import org.apache.beam.sdk.transforms.DoFn.ProcessContext;
+import org.apache.beam.sdk.transforms.DoFn.ProcessElement;
import org.apache.beam.sdk.transforms.dofnreflector.DoFnReflectorTestHelper;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.UserCodeException;
@@ -61,13 +61,13 @@ public class DoFnReflectorTest {
}
}
- private DoFnWithContext<String, String> fn;
+ private DoFn<String, String> fn;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Mock
- private DoFnWithContext<String, String>.ProcessContext mockContext;
+ private DoFn<String, String>.ProcessContext mockContext;
@Mock
private BoundedWindow mockWindow;
@Mock
@@ -91,7 +91,7 @@ public class DoFnReflectorTest {
};
}
- private DoFnReflector underTest(DoFnWithContext<String, String> fn) {
+ private DoFnReflector underTest(DoFn<String, String> fn) {
this.fn = fn;
return DoFnReflector.of(fn.getClass());
}
@@ -141,7 +141,7 @@ public class DoFnReflectorTest {
@Test
public void testDoFnWithNoExtraContext() throws Exception {
final Invocations invocations = new Invocations("AnonymousClass");
- DoFnReflector reflector = underTest(new DoFnWithContext<String, String>() {
+ DoFnReflector reflector = underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement(ProcessContext c)
@@ -172,19 +172,19 @@ public class DoFnReflectorTest {
interface InterfaceWithProcessElement {
@ProcessElement
- void processElement(DoFnWithContext<String, String>.ProcessContext c);
+ void processElement(DoFn<String, String>.ProcessContext c);
}
interface LayersOfInterfaces extends InterfaceWithProcessElement {}
private class IdentityUsingInterfaceWithProcessElement
- extends DoFnWithContext<String, String>
+ extends DoFn<String, String>
implements LayersOfInterfaces {
private Invocations invocations = new Invocations("Named Class");
@Override
- public void processElement(DoFnWithContext<String, String>.ProcessContext c) {
+ public void processElement(DoFn<String, String>.ProcessContext c) {
invocations.wasProcessElementInvoked = true;
assertSame(c, mockContext);
}
@@ -198,7 +198,7 @@ public class DoFnReflectorTest {
checkInvokeProcessElementWorks(reflector, fn.invocations);
}
- private class IdentityParent extends DoFnWithContext<String, String> {
+ private class IdentityParent extends DoFn<String, String> {
protected Invocations parentInvocations = new Invocations("IdentityParent");
@ProcessElement
@@ -215,7 +215,7 @@ public class DoFnReflectorTest {
protected Invocations childInvocations = new Invocations("IdentityChildWithOverride");
@Override
- public void process(DoFnWithContext<String, String>.ProcessContext c) {
+ public void process(DoFn<String, String>.ProcessContext c) {
super.process(c);
childInvocations.wasProcessElementInvoked = true;
}
@@ -240,7 +240,7 @@ public class DoFnReflectorTest {
@Test
public void testDoFnWithWindow() throws Exception {
final Invocations invocations = new Invocations("AnonymousClass");
- DoFnReflector reflector = underTest(new DoFnWithContext<String, String>() {
+ DoFnReflector reflector = underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement(ProcessContext c, BoundedWindow w)
@@ -259,7 +259,7 @@ public class DoFnReflectorTest {
@Test
public void testDoFnWithWindowingInternals() throws Exception {
final Invocations invocations = new Invocations("AnonymousClass");
- DoFnReflector reflector = underTest(new DoFnWithContext<String, String>() {
+ DoFnReflector reflector = underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement(ProcessContext c, WindowingInternals<String, String> w)
@@ -278,7 +278,7 @@ public class DoFnReflectorTest {
@Test
public void testDoFnWithStartBundle() throws Exception {
final Invocations invocations = new Invocations("AnonymousClass");
- DoFnReflector reflector = underTest(new DoFnWithContext<String, String>() {
+ DoFnReflector reflector = underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement(@SuppressWarnings("unused") ProcessContext c) {}
@@ -304,7 +304,7 @@ public class DoFnReflectorTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("No method annotated with @ProcessElement found");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {});
+ underTest(new DoFn<String, String>() {});
}
@Test
@@ -314,7 +314,7 @@ public class DoFnReflectorTest {
thrown.expectMessage("foo()");
thrown.expectMessage("bar()");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
public void foo() {}
@@ -330,7 +330,7 @@ public class DoFnReflectorTest {
thrown.expectMessage("bar()");
thrown.expectMessage("baz()");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
public void foo() {}
@@ -349,7 +349,7 @@ public class DoFnReflectorTest {
thrown.expectMessage("bar()");
thrown.expectMessage("baz()");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
public void foo() {}
@@ -361,7 +361,7 @@ public class DoFnReflectorTest {
});
}
- private static class PrivateDoFnClass extends DoFnWithContext<String, String> {
+ private static class PrivateDoFnClass extends DoFn<String, String> {
final Invocations invocations = new Invocations(getClass().getName());
@ProcessElement
@@ -429,7 +429,7 @@ public class DoFnReflectorTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("process() must be public");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
private void process() {}
});
@@ -440,7 +440,7 @@ public class DoFnReflectorTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("startBundle() must be public");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement() {}
@@ -454,7 +454,7 @@ public class DoFnReflectorTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("finishBundle() must be public");
thrown.expectMessage(getClass().getName() + "$");
- underTest(new DoFnWithContext<String, String>() {
+ underTest(new DoFn<String, String>() {
@ProcessElement
public void processElement() {}
@@ -490,7 +490,7 @@ public class DoFnReflectorTest {
}
@SuppressWarnings({"unused"})
- private void badExtraContext(DoFnWithContext<Integer, String>.Context c, int n) {}
+ private void badExtraContext(DoFn<Integer, String>.Context c, int n) {}
@Test
public void testBadExtraContext() throws Exception {
@@ -505,7 +505,7 @@ public class DoFnReflectorTest {
@SuppressWarnings({"unused"})
private void badExtraProcessContext(
- DoFnWithContext<Integer, String>.ProcessContext c, Integer n) {}
+ DoFn<Integer, String>.ProcessContext c, Integer n) {}
@Test
public void testBadExtraProcessContextType() throws Exception {
@@ -534,58 +534,58 @@ public class DoFnReflectorTest {
}
@SuppressWarnings("unused")
- private void goodGenerics(DoFnWithContext<Integer, String>.ProcessContext c,
+ private void goodGenerics(DoFn<Integer, String>.ProcessContext c,
WindowingInternals<Integer, String> i1) {}
@Test
public void testValidGenerics() throws Exception {
Method method = getClass().getDeclaredMethod("goodGenerics",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
DoFnReflector.verifyProcessMethodArguments(method);
}
@SuppressWarnings("unused")
- private void goodWildcards(DoFnWithContext<Integer, String>.ProcessContext c,
+ private void goodWildcards(DoFn<Integer, String>.ProcessContext c,
WindowingInternals<?, ?> i1) {}
@Test
public void testGoodWildcards() throws Exception {
Method method = getClass().getDeclaredMethod("goodWildcards",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
DoFnReflector.verifyProcessMethodArguments(method);
}
@SuppressWarnings("unused")
- private void goodBoundedWildcards(DoFnWithContext<Integer, String>.ProcessContext c,
+ private void goodBoundedWildcards(DoFn<Integer, String>.ProcessContext c,
WindowingInternals<? super Integer, ? super String> i1) {}
@Test
public void testGoodBoundedWildcards() throws Exception {
Method method = getClass().getDeclaredMethod("goodBoundedWildcards",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
DoFnReflector.verifyProcessMethodArguments(method);
}
@SuppressWarnings("unused")
private <InputT, OutputT> void goodTypeVariables(
- DoFnWithContext<InputT, OutputT>.ProcessContext c,
+ DoFn<InputT, OutputT>.ProcessContext c,
WindowingInternals<InputT, OutputT> i1) {}
@Test
public void testGoodTypeVariables() throws Exception {
Method method = getClass().getDeclaredMethod("goodTypeVariables",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
DoFnReflector.verifyProcessMethodArguments(method);
}
@SuppressWarnings("unused")
- private void badGenericTwoArgs(DoFnWithContext<Integer, String>.ProcessContext c,
+ private void badGenericTwoArgs(DoFn<Integer, String>.ProcessContext c,
WindowingInternals<Integer, Integer> i1) {}
@Test
public void testBadGenericsTwoArgs() throws Exception {
Method method = getClass().getDeclaredMethod("badGenericTwoArgs",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
thrown.expect(IllegalStateException.class);
thrown.expectMessage("Incompatible generics in context parameter "
@@ -598,13 +598,13 @@ public class DoFnReflectorTest {
}
@SuppressWarnings("unused")
- private void badGenericWildCards(DoFnWithContext<Integer, String>.ProcessContext c,
+ private void badGenericWildCards(DoFn<Integer, String>.ProcessContext c,
WindowingInternals<Integer, ? super Integer> i1) {}
@Test
public void testBadGenericWildCards() throws Exception {
Method method = getClass().getDeclaredMethod("badGenericWildCards",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
thrown.expect(IllegalStateException.class);
thrown.expectMessage("Incompatible generics in context parameter "
@@ -617,13 +617,13 @@ public class DoFnReflectorTest {
}
@SuppressWarnings("unused")
- private <InputT, OutputT> void badTypeVariables(DoFnWithContext<InputT, OutputT>.ProcessContext c,
+ private <InputT, OutputT> void badTypeVariables(DoFn<InputT, OutputT>.ProcessContext c,
WindowingInternals<InputT, InputT> i1) {}
@Test
public void testBadTypeVariables() throws Exception {
Method method = getClass().getDeclaredMethod("badTypeVariables",
- DoFnWithContext.ProcessContext.class, WindowingInternals.class);
+ DoFn.ProcessContext.class, WindowingInternals.class);
thrown.expect(IllegalStateException.class);
thrown.expectMessage("Incompatible generics in context parameter "
@@ -636,7 +636,7 @@ public class DoFnReflectorTest {
@Test
public void testProcessElementException() throws Exception {
- DoFnWithContext<Integer, Integer> fn = new DoFnWithContext<Integer, Integer>() {
+ DoFn<Integer, Integer> fn = new DoFn<Integer, Integer>() {
@ProcessElement
public void processElement(@SuppressWarnings("unused") ProcessContext c) {
throw new IllegalArgumentException("bogus");
@@ -650,7 +650,7 @@ public class DoFnReflectorTest {
@Test
public void testStartBundleException() throws Exception {
- DoFnWithContext<Integer, Integer> fn = new DoFnWithContext<Integer, Integer>() {
+ DoFn<Integer, Integer> fn = new DoFn<Integer, Integer>() {
@StartBundle
public void startBundle(@SuppressWarnings("unused") Context c) {
throw new IllegalArgumentException("bogus");
@@ -668,7 +668,7 @@ public class DoFnReflectorTest {
@Test
public void testFinishBundleException() throws Exception {
- DoFnWithContext<Integer, Integer> fn = new DoFnWithContext<Integer, Integer>() {
+ DoFn<Integer, Integer> fn = new DoFn<Integer, Integer>() {
@FinishBundle
public void finishBundle(@SuppressWarnings("unused") Context c) {
throw new IllegalArgumentException("bogus");
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
new file mode 100644
index 0000000..c7e8972
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.isA;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
+import org.apache.beam.sdk.testing.NeedsRunner;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.Serializable;
+
+/** Tests for {@link DoFn}. */
+@RunWith(JUnit4.class)
+public class DoFnTest implements Serializable {
+ @Rule
+ public transient ExpectedException thrown = ExpectedException.none();
+
+ private class NoOpDoFn extends DoFn<Void, Void> {
+
+ /**
+ * @param c context
+ */
+ @ProcessElement
+ public void processElement(ProcessContext c) {
+ }
+ }
+
+ @Test
+ public void testCreateAggregatorWithCombinerSucceeds() {
+ String name = "testAggregator";
+ Sum.SumLongFn combiner = new Sum.SumLongFn();
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
+
+ assertEquals(name, aggregator.getName());
+ assertEquals(combiner, aggregator.getCombineFn());
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullNameThrowsException() {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("name cannot be null");
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ doFn.createAggregator(null, new Sum.SumLongFn());
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullCombineFnThrowsException() {
+ CombineFn<Object, Object, Object> combiner = null;
+
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("combiner cannot be null");
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ doFn.createAggregator("testAggregator", combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullSerializableFnThrowsException() {
+ SerializableFunction<Iterable<Object>, Object> combiner = null;
+
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("combiner cannot be null");
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ doFn.createAggregator("testAggregator", combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorWithSameNameThrowsException() {
+ String name = "testAggregator";
+ CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ doFn.createAggregator(name, combiner);
+
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Cannot create");
+ thrown.expectMessage(name);
+ thrown.expectMessage("already exists");
+
+ doFn.createAggregator(name, combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorsWithDifferentNamesSucceeds() {
+ String nameOne = "testAggregator";
+ String nameTwo = "aggregatorPrime";
+ CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+ DoFn<Void, Void> doFn = new NoOpDoFn();
+
+ Aggregator<Double, Double> aggregatorOne =
+ doFn.createAggregator(nameOne, combiner);
+ Aggregator<Double, Double> aggregatorTwo =
+ doFn.createAggregator(nameTwo, combiner);
+
+ assertNotEquals(aggregatorOne, aggregatorTwo);
+ }
+
+ @Test
+ public void testDoFnWithContextUsingAggregators() {
+ NoOpOldDoFn<Object, Object> noOpFn = new NoOpOldDoFn<>();
+ OldDoFn<Object, Object>.Context context = noOpFn.context();
+
+ OldDoFn<Object, Object> fn = spy(noOpFn);
+ context = spy(context);
+
+ @SuppressWarnings("unchecked")
+ Aggregator<Long, Long> agg = mock(Aggregator.class);
+
+ Sum.SumLongFn combiner = new Sum.SumLongFn();
+ Aggregator<Long, Long> delegateAggregator =
+ fn.createAggregator("test", combiner);
+
+ when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
+
+ context.setupDelegateAggregators();
+ delegateAggregator.addValue(1L);
+
+ verify(agg).addValue(1L);
+ }
+
+ @Test
+ public void testDefaultPopulateDisplayDataImplementation() {
+ DoFn<String, String> fn = new DoFn<String, String>() {
+ };
+ DisplayData displayData = DisplayData.from(fn);
+ assertThat(displayData.items(), empty());
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInStartBundleThrows() {
+ TestPipeline p = createTestPipeline(new DoFn<String, String>() {
+ @StartBundle
+ public void startBundle(Context c) {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+
+ @ProcessElement
+ public void processElement(ProcessContext c) {}
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInProcessElementThrows() {
+ TestPipeline p = createTestPipeline(new DoFn<String, String>() {
+ @ProcessElement
+ public void processElement(ProcessContext c) {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInFinishBundleThrows() {
+ TestPipeline p = createTestPipeline(new DoFn<String, String>() {
+ @FinishBundle
+ public void finishBundle(Context c) {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+
+ @ProcessElement
+ public void processElement(ProcessContext c) {}
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ /**
+ * Initialize a test pipeline with the specified {@link OldDoFn}.
+ */
+ private <InputT, OutputT> TestPipeline createTestPipeline(DoFn<InputT, OutputT> fn) {
+ TestPipeline pipeline = TestPipeline.create();
+ pipeline.apply(Create.of((InputT) null))
+ .apply(ParDo.of(fn));
+
+ return pipeline;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3bcb6f46/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
deleted file mode 100644
index 0a910b8..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.isA;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
-import org.apache.beam.sdk.testing.NeedsRunner;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.Serializable;
-
-/** Tests for {@link DoFnWithContext}. */
-@RunWith(JUnit4.class)
-public class DoFnWithContextTest implements Serializable {
- @Rule
- public transient ExpectedException thrown = ExpectedException.none();
-
- private class NoOpDoFnWithContext extends DoFnWithContext<Void, Void> {
-
- /**
- * @param c context
- */
- @ProcessElement
- public void processElement(ProcessContext c) {
- }
- }
-
- @Test
- public void testCreateAggregatorWithCombinerSucceeds() {
- String name = "testAggregator";
- Sum.SumLongFn combiner = new Sum.SumLongFn();
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
-
- assertEquals(name, aggregator.getName());
- assertEquals(combiner, aggregator.getCombineFn());
- }
-
- @Test
- public void testCreateAggregatorWithNullNameThrowsException() {
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("name cannot be null");
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- doFn.createAggregator(null, new Sum.SumLongFn());
- }
-
- @Test
- public void testCreateAggregatorWithNullCombineFnThrowsException() {
- CombineFn<Object, Object, Object> combiner = null;
-
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("combiner cannot be null");
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- doFn.createAggregator("testAggregator", combiner);
- }
-
- @Test
- public void testCreateAggregatorWithNullSerializableFnThrowsException() {
- SerializableFunction<Iterable<Object>, Object> combiner = null;
-
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("combiner cannot be null");
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- doFn.createAggregator("testAggregator", combiner);
- }
-
- @Test
- public void testCreateAggregatorWithSameNameThrowsException() {
- String name = "testAggregator";
- CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- doFn.createAggregator(name, combiner);
-
- thrown.expect(IllegalArgumentException.class);
- thrown.expectMessage("Cannot create");
- thrown.expectMessage(name);
- thrown.expectMessage("already exists");
-
- doFn.createAggregator(name, combiner);
- }
-
- @Test
- public void testCreateAggregatorsWithDifferentNamesSucceeds() {
- String nameOne = "testAggregator";
- String nameTwo = "aggregatorPrime";
- CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
- DoFnWithContext<Void, Void> doFn = new NoOpDoFnWithContext();
-
- Aggregator<Double, Double> aggregatorOne =
- doFn.createAggregator(nameOne, combiner);
- Aggregator<Double, Double> aggregatorTwo =
- doFn.createAggregator(nameTwo, combiner);
-
- assertNotEquals(aggregatorOne, aggregatorTwo);
- }
-
- @Test
- public void testDoFnWithContextUsingAggregators() {
- NoOpOldDoFn<Object, Object> noOpFn = new NoOpOldDoFn<>();
- OldDoFn<Object, Object>.Context context = noOpFn.context();
-
- OldDoFn<Object, Object> fn = spy(noOpFn);
- context = spy(context);
-
- @SuppressWarnings("unchecked")
- Aggregator<Long, Long> agg = mock(Aggregator.class);
-
- Sum.SumLongFn combiner = new Sum.SumLongFn();
- Aggregator<Long, Long> delegateAggregator =
- fn.createAggregator("test", combiner);
-
- when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
-
- context.setupDelegateAggregators();
- delegateAggregator.addValue(1L);
-
- verify(agg).addValue(1L);
- }
-
- @Test
- public void testDefaultPopulateDisplayDataImplementation() {
- DoFnWithContext<String, String> fn = new DoFnWithContext<String, String>() {
- };
- DisplayData displayData = DisplayData.from(fn);
- assertThat(displayData.items(), empty());
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInStartBundleThrows() {
- TestPipeline p = createTestPipeline(new DoFnWithContext<String, String>() {
- @StartBundle
- public void startBundle(Context c) {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
-
- @ProcessElement
- public void processElement(ProcessContext c) {}
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInProcessElementThrows() {
- TestPipeline p = createTestPipeline(new DoFnWithContext<String, String>() {
- @ProcessElement
- public void processElement(ProcessContext c) {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInFinishBundleThrows() {
- TestPipeline p = createTestPipeline(new DoFnWithContext<String, String>() {
- @FinishBundle
- public void finishBundle(Context c) {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
-
- @ProcessElement
- public void processElement(ProcessContext c) {}
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- /**
- * Initialize a test pipeline with the specified {@link OldDoFn}.
- */
- private <InputT, OutputT> TestPipeline createTestPipeline(DoFnWithContext<InputT, OutputT> fn) {
- TestPipeline pipeline = TestPipeline.create();
- pipeline.apply(Create.of((InputT) null))
- .apply(ParDo.of(fn));
-
- return pipeline;
- }
-}
[04/19] incubator-beam git commit: Rename NoOpDoFn to NoOpOldDoFn
Posted by dh...@apache.org.
Rename NoOpDoFn to NoOpOldDoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/e1609664
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/e1609664
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/e1609664
Branch: refs/heads/master
Commit: e16096648fa92e72822dc02dd1958f5e53fd6fa0
Parents: a64baf4
Author: Kenneth Knowles <kl...@google.com>
Authored: Mon Jul 25 21:27:02 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:52 2016 -0700
----------------------------------------------------------------------
.../DoFnDelegatingAggregatorTest.java | 10 +-
.../sdk/transforms/DoFnWithContextTest.java | 2 +-
.../apache/beam/sdk/transforms/NoOpDoFn.java | 144 -------------------
.../apache/beam/sdk/transforms/NoOpOldDoFn.java | 144 +++++++++++++++++++
.../beam/sdk/transforms/OldDoFnContextTest.java | 2 +-
.../apache/beam/sdk/transforms/OldDoFnTest.java | 12 +-
6 files changed, 157 insertions(+), 157 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
index 2488042..4e8d06c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
@@ -54,7 +54,7 @@ public class DoFnDelegatingAggregatorTest {
@Test
public void testAddValueWithoutDelegateThrowsException() {
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
@@ -74,7 +74,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Long, ?, Long> combiner = mockCombineFn(Long.class);
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
DelegatingAggregator<Long, Long> aggregator =
(DelegatingAggregator<Long, Long>) doFn.createAggregator(name, combiner);
@@ -91,7 +91,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -114,7 +114,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -127,7 +127,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
index 8b00c03..0a910b8 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
@@ -141,7 +141,7 @@ public class DoFnWithContextTest implements Serializable {
@Test
public void testDoFnWithContextUsingAggregators() {
- NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
+ NoOpOldDoFn<Object, Object> noOpFn = new NoOpOldDoFn<>();
OldDoFn<Object, Object>.Context context = noOpFn.context();
OldDoFn<Object, Object> fn = spy(noOpFn);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
deleted file mode 100644
index 5c43755..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.WindowingInternals;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-
-import org.joda.time.Instant;
-
-/**
- * A {@link OldDoFn} that does nothing with provided elements. Used for testing
- * methods provided by the OldDoFn abstract class.
- *
- * @param <InputT> unused.
- * @param <OutputT> unused.
- */
-class NoOpDoFn<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
- @Override
- public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
- }
-
- /**
- * Returns a new NoOp Context.
- */
- public OldDoFn<InputT, OutputT>.Context context() {
- return new NoOpDoFnContext();
- }
-
- /**
- * Returns a new NoOp Process Context.
- */
- public OldDoFn<InputT, OutputT>.ProcessContext processContext() {
- return new NoOpDoFnProcessContext();
- }
-
- /**
- * A {@link OldDoFn.Context} that does nothing and returns exclusively null.
- */
- private class NoOpDoFnContext extends OldDoFn<InputT, OutputT>.Context {
- @Override
- public PipelineOptions getPipelineOptions() {
- return null;
- }
- @Override
- public void output(OutputT output) {
- }
- @Override
- public void outputWithTimestamp(OutputT output, Instant timestamp) {
- }
- @Override
- public <T> void sideOutput(TupleTag<T> tag, T output) {
- }
- @Override
- public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output,
- Instant timestamp) {
- }
- @Override
- protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
- createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
- return null;
- }
- }
-
- /**
- * A {@link OldDoFn.ProcessContext} that does nothing and returns exclusively
- * null.
- */
- private class NoOpDoFnProcessContext extends OldDoFn<InputT, OutputT>.ProcessContext {
- @Override
- public InputT element() {
- return null;
- }
-
- @Override
- public <T> T sideInput(PCollectionView<T> view) {
- return null;
- }
-
- @Override
- public Instant timestamp() {
- return null;
- }
-
- @Override
- public BoundedWindow window() {
- return null;
- }
-
- @Override
- public PaneInfo pane() {
- return null;
- }
-
- @Override
- public WindowingInternals<InputT, OutputT> windowingInternals() {
- return null;
- }
-
- @Override
- public PipelineOptions getPipelineOptions() {
- return null;
- }
-
- @Override
- public void output(OutputT output) {}
-
- @Override
- public void outputWithTimestamp(OutputT output, Instant timestamp) {}
-
- @Override
- public <T> void sideOutput(TupleTag<T> tag, T output) {}
-
- @Override
- public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output,
- Instant timestamp) {}
-
- @Override
- protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
- createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
- return null;
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java
new file mode 100644
index 0000000..8f2bd5e
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowingInternals;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+
+import org.joda.time.Instant;
+
+/**
+ * A {@link OldDoFn} that does nothing with provided elements. Used for testing
+ * methods provided by the OldDoFn abstract class.
+ *
+ * @param <InputT> unused.
+ * @param <OutputT> unused.
+ */
+class NoOpOldDoFn<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
+ @Override
+ public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
+ }
+
+ /**
+ * Returns a new NoOp Context.
+ */
+ public OldDoFn<InputT, OutputT>.Context context() {
+ return new NoOpDoFnContext();
+ }
+
+ /**
+ * Returns a new NoOp Process Context.
+ */
+ public OldDoFn<InputT, OutputT>.ProcessContext processContext() {
+ return new NoOpDoFnProcessContext();
+ }
+
+ /**
+ * A {@link OldDoFn.Context} that does nothing and returns exclusively null.
+ */
+ private class NoOpDoFnContext extends OldDoFn<InputT, OutputT>.Context {
+ @Override
+ public PipelineOptions getPipelineOptions() {
+ return null;
+ }
+ @Override
+ public void output(OutputT output) {
+ }
+ @Override
+ public void outputWithTimestamp(OutputT output, Instant timestamp) {
+ }
+ @Override
+ public <T> void sideOutput(TupleTag<T> tag, T output) {
+ }
+ @Override
+ public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output,
+ Instant timestamp) {
+ }
+ @Override
+ protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+ createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
+ return null;
+ }
+ }
+
+ /**
+ * A {@link OldDoFn.ProcessContext} that does nothing and returns exclusively
+ * null.
+ */
+ private class NoOpDoFnProcessContext extends OldDoFn<InputT, OutputT>.ProcessContext {
+ @Override
+ public InputT element() {
+ return null;
+ }
+
+ @Override
+ public <T> T sideInput(PCollectionView<T> view) {
+ return null;
+ }
+
+ @Override
+ public Instant timestamp() {
+ return null;
+ }
+
+ @Override
+ public BoundedWindow window() {
+ return null;
+ }
+
+ @Override
+ public PaneInfo pane() {
+ return null;
+ }
+
+ @Override
+ public WindowingInternals<InputT, OutputT> windowingInternals() {
+ return null;
+ }
+
+ @Override
+ public PipelineOptions getPipelineOptions() {
+ return null;
+ }
+
+ @Override
+ public void output(OutputT output) {}
+
+ @Override
+ public void outputWithTimestamp(OutputT output, Instant timestamp) {}
+
+ @Override
+ public <T> void sideOutput(TupleTag<T> tag, T output) {}
+
+ @Override
+ public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output,
+ Instant timestamp) {}
+
+ @Override
+ protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+ createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
+ return null;
+ }
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
index 9234ccb..b2d4aed 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
@@ -46,7 +46,7 @@ public class OldDoFnContextTest {
// Need to be real objects to call the constructor, and to reference the
// outer instance of OldDoFn
- NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
+ NoOpOldDoFn<Object, Object> noOpFn = new NoOpOldDoFn<>();
OldDoFn<Object, Object>.Context noOpContext = noOpFn.context();
fn = spy(noOpFn);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e1609664/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
index 49f4366..9d144b3 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
@@ -60,7 +60,7 @@ public class OldDoFnTest implements Serializable {
String name = "testAggregator";
Sum.SumLongFn combiner = new Sum.SumLongFn();
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
@@ -73,7 +73,7 @@ public class OldDoFnTest implements Serializable {
thrown.expect(NullPointerException.class);
thrown.expectMessage("name cannot be null");
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
doFn.createAggregator(null, new Sum.SumLongFn());
}
@@ -85,7 +85,7 @@ public class OldDoFnTest implements Serializable {
thrown.expect(NullPointerException.class);
thrown.expectMessage("combiner cannot be null");
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
doFn.createAggregator("testAggregator", combiner);
}
@@ -97,7 +97,7 @@ public class OldDoFnTest implements Serializable {
thrown.expect(NullPointerException.class);
thrown.expectMessage("combiner cannot be null");
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
doFn.createAggregator("testAggregator", combiner);
}
@@ -107,7 +107,7 @@ public class OldDoFnTest implements Serializable {
String name = "testAggregator";
CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
doFn.createAggregator(name, combiner);
@@ -125,7 +125,7 @@ public class OldDoFnTest implements Serializable {
String nameTwo = "aggregatorPrime";
CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
- OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpOldDoFn<>();
Aggregator<Double, Double> aggregatorOne =
doFn.createAggregator(nameOne, combiner);
[11/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
index 2696020..ed9ec10 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java
@@ -25,8 +25,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
@@ -58,15 +58,15 @@ import java.util.Set;
*/
public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
- /** The DoFn being run. */
- public final DoFn<InputT, OutputT> fn;
+ /** The OldDoFn being run. */
+ public final OldDoFn<InputT, OutputT> fn;
- /** The context used for running the DoFn. */
+ /** The context used for running the OldDoFn. */
public final DoFnContext<InputT, OutputT> context;
protected DoFnRunnerBase(
PipelineOptions options,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
@@ -145,7 +145,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
}
/**
- * Invokes {@link DoFn#processElement} after certain pre-processings has been done in
+ * Invokes {@link OldDoFn#processElement} after certain pre-processings has been done in
* {@link DoFnRunnerBase#processElement}.
*/
protected abstract void invokeProcessElement(WindowedValue<InputT> elem);
@@ -162,17 +162,17 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
}
/**
- * A concrete implementation of {@code DoFn.Context} used for running a {@link DoFn}.
+ * A concrete implementation of {@code OldDoFn.Context} used for running a {@link OldDoFn}.
*
- * @param <InputT> the type of the DoFn's (main) input elements
- * @param <OutputT> the type of the DoFn's (main) output elements
+ * @param <InputT> the type of the OldDoFn's (main) input elements
+ * @param <OutputT> the type of the OldDoFn's (main) output elements
*/
private static class DoFnContext<InputT, OutputT>
- extends DoFn<InputT, OutputT>.Context {
+ extends OldDoFn<InputT, OutputT>.Context {
private static final int MAX_SIDE_OUTPUTS = 1000;
final PipelineOptions options;
- final DoFn<InputT, OutputT> fn;
+ final OldDoFn<InputT, OutputT> fn;
final SideInputReader sideInputReader;
final OutputManager outputManager;
final TupleTag<OutputT> mainOutputTag;
@@ -187,7 +187,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
private Set<TupleTag<?>> outputTags;
public DoFnContext(PipelineOptions options,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
@@ -317,8 +317,8 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
}
// Following implementations of output, outputWithTimestamp, and sideOutput
- // are only accessible in DoFn.startBundle and DoFn.finishBundle, and will be shadowed by
- // ProcessContext's versions in DoFn.processElement.
+ // are only accessible in OldDoFn.startBundle and OldDoFn.finishBundle, and will be shadowed by
+ // ProcessContext's versions in OldDoFn.processElement.
@Override
public void output(OutputT output) {
outputWindowedValue(output, null, null, PaneInfo.NO_FIRING);
@@ -350,9 +350,10 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
}
/**
- * Returns a new {@code DoFn.ProcessContext} for the given element.
+ * Returns a new {@code OldDoFn.ProcessContext} for the given element.
*/
- protected DoFn<InputT, OutputT>.ProcessContext createProcessContext(WindowedValue<InputT> elem) {
+ protected OldDoFn<InputT, OutputT>.ProcessContext createProcessContext(
+ WindowedValue<InputT> elem) {
return new DoFnProcessContext<InputT, OutputT>(fn, context, elem);
}
@@ -365,21 +366,21 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
}
/**
- * A concrete implementation of {@code DoFn.ProcessContext} used for
- * running a {@link DoFn} over a single element.
+ * A concrete implementation of {@code OldDoFn.ProcessContext} used for
+ * running a {@link OldDoFn} over a single element.
*
- * @param <InputT> the type of the DoFn's (main) input elements
- * @param <OutputT> the type of the DoFn's (main) output elements
+ * @param <InputT> the type of the OldDoFn's (main) input elements
+ * @param <OutputT> the type of the OldDoFn's (main) output elements
*/
static class DoFnProcessContext<InputT, OutputT>
- extends DoFn<InputT, OutputT>.ProcessContext {
+ extends OldDoFn<InputT, OutputT>.ProcessContext {
- final DoFn<InputT, OutputT> fn;
+ final OldDoFn<InputT, OutputT> fn;
final DoFnContext<InputT, OutputT> context;
final WindowedValue<InputT> windowedValue;
- public DoFnProcessContext(DoFn<InputT, OutputT> fn,
+ public DoFnProcessContext(OldDoFn<InputT, OutputT> fn,
DoFnContext<InputT, OutputT> context,
WindowedValue<InputT> windowedValue) {
fn.super();
@@ -426,7 +427,8 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
public BoundedWindow window() {
if (!(fn instanceof RequiresWindowAccess)) {
throw new UnsupportedOperationException(
- "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+ "window() is only available in the context of an OldDoFn marked as "
+ + "RequiresWindowAccess.");
}
return Iterables.getOnlyElement(windows());
}
@@ -484,7 +486,7 @@ public abstract class DoFnRunnerBase<InputT, OutputT> implements DoFnRunner<Inpu
throw new IllegalArgumentException(String.format(
"Cannot output with timestamp %s. Output timestamps must be no earlier than the "
+ "timestamp of the current input (%s) minus the allowed skew (%s). See the "
- + "DoFn#getAllowedTimestampSkew() Javadoc for details on changing the allowed skew.",
+ + "OldDoFn#getAllowedTimestampSkew() Javadoc for details on changing the allowed skew.",
timestamp, windowedValue.getTimestamp(),
PeriodFormat.getDefault().print(fn.getAllowedTimestampSkew().toPeriod())));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
index cb96da2..a9f3cf4 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.DoFnRunner.ReduceFnExecutor;
import org.apache.beam.sdk.util.ExecutionContext.StepContext;
@@ -27,6 +27,7 @@ import org.apache.beam.sdk.util.common.CounterSet;
import org.apache.beam.sdk.util.common.CounterSet.AddCounterMutator;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TupleTag;
+
import java.util.List;
/**
@@ -44,13 +45,13 @@ public class DoFnRunners {
}
/**
- * Returns a basic implementation of {@link DoFnRunner} that works for most {@link DoFn DoFns}.
+ * Returns a basic implementation of {@link DoFnRunner} that works for most {@link OldDoFn OldDoFns}.
*
- * <p>It invokes {@link DoFn#processElement} for each input.
+ * <p>It invokes {@link OldDoFn#processElement} for each input.
*/
public static <InputT, OutputT> DoFnRunner<InputT, OutputT> simpleRunner(
PipelineOptions options,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
@@ -71,13 +72,14 @@ public class DoFnRunners {
}
/**
- * Returns a basic implementation of {@link DoFnRunner} that works for most {@link DoFn DoFns}.
+ * Returns a basic implementation of {@link DoFnRunner} that works for most
+ * {@link OldDoFn OldDoFns}.
*
- * <p>It invokes {@link DoFn#processElement} for each input.
+ * <p>It invokes {@link OldDoFn#processElement} for each input.
*/
public static <InputT, OutputT> DoFnRunner<InputT, OutputT> simpleRunner(
PipelineOptions options,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
@@ -99,7 +101,7 @@ public class DoFnRunners {
/**
* Returns an implementation of {@link DoFnRunner} that handles late data dropping.
*
- * <p>It drops elements from expired windows before they reach the underlying {@link DoFn}.
+ * <p>It drops elements from expired windows before they reach the underlying {@link OldDoFn}.
*/
public static <K, InputT, OutputT, W extends BoundedWindow>
DoFnRunner<KeyedWorkItem<K, InputT>, KV<K, OutputT>> lateDataDroppingRunner(
@@ -133,7 +135,7 @@ public class DoFnRunners {
/**
* Returns an implementation of {@link DoFnRunner} that handles late data dropping.
*
- * <p>It drops elements from expired windows before they reach the underlying {@link DoFn}.
+ * <p>It drops elements from expired windows before they reach the underlying {@link OldDoFn}.
*/
public static <K, InputT, OutputT, W extends BoundedWindow>
DoFnRunner<KeyedWorkItem<K, InputT>, KV<K, OutputT>> lateDataDroppingRunner(
@@ -160,7 +162,7 @@ public class DoFnRunners {
public static <InputT, OutputT> DoFnRunner<InputT, OutputT> createDefault(
PipelineOptions options,
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
@@ -198,7 +200,7 @@ public class DoFnRunners {
public static <InputT, OutputT> DoFnRunner<InputT, OutputT> createDefault(
PipelineOptions options,
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag,
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
index b575559..f82e5df 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsDoFn.java
@@ -19,14 +19,14 @@ package org.apache.beam.sdk.util;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.state.StateInternalsFactory;
import org.apache.beam.sdk.values.KV;
/**
- * DoFn that merges windows and groups elements in those windows, optionally
+ * OldDoFn that merges windows and groups elements in those windows, optionally
* combining values.
*
* @param <K> key type
@@ -36,7 +36,7 @@ import org.apache.beam.sdk.values.KV;
*/
@SystemDoFnInternal
public abstract class GroupAlsoByWindowsDoFn<K, InputT, OutputT, W extends BoundedWindow>
- extends DoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
+ extends OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
public static final String DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER = "DroppedDueToClosedWindow";
public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "DroppedDueToLateness";
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
index d185a24..f872ffc 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java
@@ -17,7 +17,7 @@
*/
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.state.StateInternals;
import org.apache.beam.sdk.util.state.StateInternalsFactory;
@@ -52,7 +52,7 @@ public class GroupAlsoByWindowsViaOutputBufferDoFn<K, InputT, OutputT, W extends
@Override
public void processElement(
- DoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>>.ProcessContext c)
+ OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>>.ProcessContext c)
throws Exception {
K key = c.element().getKey();
// Used with Batch, we know that all the data is available for this key. We can't use the
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
index 8a0152e..f0f9007 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java
@@ -22,8 +22,8 @@ import static com.google.common.base.Preconditions.checkArgument;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.IterableCoder;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -138,7 +138,9 @@ public class GroupByKeyViaGroupByKeyOnly<K, V>
return input
.apply(
ParDo.of(
- new DoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<WindowedValue<V>>>>() {
+ new OldDoFn<
+ KV<K, Iterable<WindowedValue<V>>>,
+ KV<K, Iterable<WindowedValue<V>>>>() {
@Override
public void processElement(ProcessContext c) {
KV<K, Iterable<WindowedValue<V>>> kvs = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
index 4815162..8b3ba24 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java
@@ -18,7 +18,7 @@
package org.apache.beam.sdk.util;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.values.KV;
@@ -31,7 +31,7 @@ import org.joda.time.Instant;
/**
* A customized {@link DoFnRunner} that handles late data dropping for
- * a {@link KeyedWorkItem} input {@link DoFn}.
+ * a {@link KeyedWorkItem} input {@link OldDoFn}.
*
* <p>It expands windows before checking data lateness.
*
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
index 812e99a..0c5849e 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java
@@ -32,7 +32,6 @@ import org.apache.beam.sdk.util.state.ValueState;
import com.google.common.annotations.VisibleForTesting;
import org.joda.time.Instant;
-
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
index c879409..1fa0830 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java
@@ -22,7 +22,7 @@ import static com.google.common.base.Preconditions.checkState;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
@@ -181,7 +181,7 @@ public class ReduceFnRunner<K, InputT, OutputT, W extends BoundedWindow> {
* Store the previously emitted pane (if any) for each window.
*
* <ul>
- * <li>State: The previous {@link PaneInfo} passed to the user's {@link DoFn#processElement},
+ * <li>State: The previous {@link PaneInfo} passed to the user's {@link OldDoFn#processElement},
* if any.
* <li>Style style: DIRECT
* <li>Merging: Always keyed by actual window, so does not depend on {@link #activeWindows}.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
index e034638..a0cdb40 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java
@@ -19,21 +19,21 @@ package org.apache.beam.sdk.util;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.DoFnRunners.OutputManager;
import org.apache.beam.sdk.util.ExecutionContext.StepContext;
import org.apache.beam.sdk.values.TupleTag;
import java.util.List;
/**
- * Runs a {@link DoFn} by constructing the appropriate contexts and passing them in.
+ * Runs an {@link OldDoFn} by constructing the appropriate contexts and passing them in.
*
- * @param <InputT> the type of the DoFn's (main) input elements
- * @param <OutputT> the type of the DoFn's (main) output elements
+ * @param <InputT> the type of the OldDoFn's (main) input elements
+ * @param <OutputT> the type of the OldDoFn's (main) output elements
*/
public class SimpleDoFnRunner<InputT, OutputT> extends DoFnRunnerBase<InputT, OutputT>{
- protected SimpleDoFnRunner(PipelineOptions options, DoFn<InputT, OutputT> fn,
+ protected SimpleDoFnRunner(PipelineOptions options, OldDoFn<InputT, OutputT> fn,
SideInputReader sideInputReader,
OutputManager outputManager,
TupleTag<OutputT> mainOutputTag, List<TupleTag<?>> sideOutputTags, StepContext stepContext,
@@ -44,7 +44,7 @@ public class SimpleDoFnRunner<InputT, OutputT> extends DoFnRunnerBase<InputT, Ou
@Override
protected void invokeProcessElement(WindowedValue<InputT> elem) {
- final DoFn<InputT, OutputT>.ProcessContext processContext = createProcessContext(elem);
+ final OldDoFn<InputT, OutputT>.ProcessContext processContext = createProcessContext(elem);
// This can contain user code. Wrap it in case it throws an exception.
try {
fn.processElement(processContext);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
index 985f210..5c17009 100644
--- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
+++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java
@@ -37,7 +37,6 @@ import org.joda.time.Duration;
import org.joda.time.Instant;
import java.io.Serializable;
-
import javax.annotation.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
index dc2413a..8d604cb 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java
@@ -21,6 +21,7 @@ import static org.apache.beam.sdk.WindowMatchers.isSingleWindowedValue;
import static org.apache.beam.sdk.WindowMatchers.isWindowedValue;
import static com.google.common.base.Preconditions.checkArgument;
+
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyIterable;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
index e0ff879..feba191 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
index fb74fc6..f0c52b9 100644
--- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.util;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.BaseExecutionContext.StepContext;
import org.apache.beam.sdk.values.TupleTag;
@@ -62,7 +62,7 @@ public class SimpleDoFnRunnerTest {
runner.processElement(WindowedValue.valueInGlobalWindow("anyValue"));
}
- private DoFnRunner<String, String> createRunner(DoFn<String, String> fn) {
+ private DoFnRunner<String, String> createRunner(OldDoFn<String, String> fn) {
// Pass in only necessary parameters for the test
List<TupleTag<?>> sideOutputTags = Arrays.asList();
StepContext context = mock(StepContext.class);
@@ -70,7 +70,7 @@ public class SimpleDoFnRunnerTest {
null, fn, null, null, null, sideOutputTags, context, null, null);
}
- static class ThrowingDoFn extends DoFn<String, String> {
+ static class ThrowingDoFn extends OldDoFn<String, String> {
final Exception exceptionToThrow =
new UnsupportedOperationException("Expected exception");
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
index 477da30..e052226 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
@@ -23,7 +23,7 @@ import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupAlsoByWindow;
import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.GroupByKeyViaGroupByKeyOnly;
@@ -106,7 +106,7 @@ class GroupAlsoByWindowEvaluatorFactory implements TransformEvaluatorFactory {
StateInternals<K> stateInternals = (StateInternals<K>) stepContext.stateInternals();
- DoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> gabwDoFn =
+ OldDoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> gabwDoFn =
GroupAlsoByWindowViaWindowSetDoFn.create(
windowingStrategy,
new ConstantStateInternalsFactory<K>(stateInternals),
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
index dcbe3d1..8be12fd 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java
@@ -23,7 +23,7 @@ import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.IllegalMutationException;
import org.apache.beam.sdk.util.MutationDetector;
import org.apache.beam.sdk.util.MutationDetectors;
@@ -42,7 +42,7 @@ import org.joda.time.Instant;
* elements added to the bundle will be encoded by the {@link Coder} of the underlying
* {@link PCollection}.
*
- * <p>This catches errors during the execution of a {@link DoFn} caused by modifying an element
+ * <p>This catches errors during the execution of an {@link OldDoFn} caused by modifying an element
* after it is added to an output {@link PCollection}.
*/
class ImmutabilityCheckingBundleFactory implements BundleFactory {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
index dd1cf37..6ef0ffe 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle;
import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.DoFnRunner;
import org.apache.beam.sdk.util.DoFnRunners;
import org.apache.beam.sdk.util.DoFnRunners.OutputManager;
@@ -48,7 +48,7 @@ class ParDoEvaluator<T> implements TransformEvaluator<T> {
DirectStepContext stepContext,
CommittedBundle<InputT> inputBundle,
AppliedPTransform<PCollection<InputT>, ?, ?> application,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
List<PCollectionView<?>> sideInputs,
TupleTag<OutputT> mainOutputTag,
List<TupleTag<?>> sideOutputTags,
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
index eda3db4..ce770ca 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.direct;
import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo.BoundMulti;
import org.apache.beam.sdk.values.PCollection;
@@ -38,7 +38,7 @@ import java.util.Map;
* {@link BoundMulti} primitive {@link PTransform}.
*/
class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
- private final LoadingCache<AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<DoFn<?, ?>>>
+ private final LoadingCache<AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>
fnClones;
public ParDoMultiEvaluatorFactory() {
@@ -46,9 +46,10 @@ class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
CacheBuilder.newBuilder()
.build(
new CacheLoader<
- AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<DoFn<?, ?>>>() {
+ AppliedPTransform<?, ?, BoundMulti<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>() {
@Override
- public ThreadLocal<DoFn<?, ?>> load(AppliedPTransform<?, ?, BoundMulti<?, ?>> key)
+ public ThreadLocal<OldDoFn<?, ?>> load(
+ AppliedPTransform<?, ?, BoundMulti<?, ?>> key)
throws Exception {
@SuppressWarnings({"unchecked", "rawtypes"})
ThreadLocal threadLocal =
@@ -76,7 +77,7 @@ class ParDoMultiEvaluatorFactory implements TransformEvaluatorFactory {
Map<TupleTag<?>, PCollection<?>> outputs = application.getOutput().getAll();
@SuppressWarnings({"unchecked", "rawtypes"})
- ThreadLocal<DoFn<InT, OuT>> fnLocal =
+ ThreadLocal<OldDoFn<InT, OuT>> fnLocal =
(ThreadLocal) fnClones.getUnchecked((AppliedPTransform) application);
String stepName = evaluationContext.getStepName(application);
DirectStepContext stepContext =
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
index 044abdc..53af6af 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.direct;
import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext;
import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo.Bound;
import org.apache.beam.sdk.values.PCollection;
@@ -38,16 +38,17 @@ import java.util.Collections;
* {@link Bound ParDo.Bound} primitive {@link PTransform}.
*/
class ParDoSingleEvaluatorFactory implements TransformEvaluatorFactory {
- private final LoadingCache<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<DoFn<?, ?>>>
+ private final LoadingCache<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>
fnClones;
public ParDoSingleEvaluatorFactory() {
fnClones =
CacheBuilder.newBuilder()
.build(
- new CacheLoader<AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<DoFn<?, ?>>>() {
+ new CacheLoader<
+ AppliedPTransform<?, ?, Bound<?, ?>>, ThreadLocal<OldDoFn<?, ?>>>() {
@Override
- public ThreadLocal<DoFn<?, ?>> load(AppliedPTransform<?, ?, Bound<?, ?>> key)
+ public ThreadLocal<OldDoFn<?, ?>> load(AppliedPTransform<?, ?, Bound<?, ?>> key)
throws Exception {
@SuppressWarnings({"unchecked", "rawtypes"})
ThreadLocal threadLocal =
@@ -80,7 +81,7 @@ class ParDoSingleEvaluatorFactory implements TransformEvaluatorFactory {
.getOrCreateStepContext(stepName, stepName);
@SuppressWarnings({"unchecked", "rawtypes"})
- ThreadLocal<DoFn<InputT, OutputT>> fnLocal =
+ ThreadLocal<OldDoFn<InputT, OutputT>> fnLocal =
(ThreadLocal) fnClones.getUnchecked((AppliedPTransform) application);
try {
ParDoEvaluator<InputT> parDoEvaluator =
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
index 7fac1e3..d021b43 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.transforms.AppliedPTransform;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import javax.annotation.Nullable;
@@ -38,8 +38,8 @@ public interface TransformEvaluatorFactory {
* Create a new {@link TransformEvaluator} for the application of the {@link PTransform}.
*
* <p>Any work that must be done before input elements are processed (such as calling
- * {@link DoFn#startBundle(DoFn.Context)}) must be done before the {@link TransformEvaluator} is
- * made available to the caller.
+ * {@link OldDoFn#startBundle(OldDoFn.Context)}) must be done before the
+ * {@link TransformEvaluator} is made available to the caller.
*
* <p>May return null if the application cannot produce an evaluator (for example, it is a
* {@link Read} {@link PTransform} where all evaluators are in-use).
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
index d6ee6ea..cee4001 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
@@ -23,9 +23,9 @@ import static com.google.common.base.Preconditions.checkArgument;
import org.apache.beam.sdk.io.Write;
import org.apache.beam.sdk.io.Write.Bound;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Values;
@@ -101,7 +101,7 @@ class WriteWithShardingFactory implements PTransformOverrideFactory {
}
@VisibleForTesting
- static class KeyBasedOnCountFn<T> extends DoFn<T, KV<Integer, T>> {
+ static class KeyBasedOnCountFn<T> extends OldDoFn<T, KV<Integer, T>> {
@VisibleForTesting
static final int MIN_SHARDS_FOR_LOG = 3;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
index 353eef6..529316c 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
@@ -62,9 +62,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
p.apply("listCreate", Create.of("foo", "bar"))
.apply(
ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c)
+ public void processElement(OldDoFn<String, String>.ProcessContext c)
throws Exception {
c.output(Integer.toString(c.element().length()));
}
@@ -109,9 +109,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
PCollection<String> transformed =
created.apply(
ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c)
+ public void processElement(OldDoFn<String, String>.ProcessContext c)
throws Exception {
c.output(Integer.toString(c.element().length()));
}
@@ -140,9 +140,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
PCollection<String> transformed =
created.apply(
ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c)
+ public void processElement(OldDoFn<String, String>.ProcessContext c)
throws Exception {
c.output(Integer.toString(c.element().length()));
}
@@ -157,9 +157,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
p.apply(Create.of("1", "2", "3"))
.apply(
ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c)
+ public void processElement(OldDoFn<String, String>.ProcessContext c)
throws Exception {
c.output(Integer.toString(c.element().length()));
}
@@ -182,9 +182,9 @@ public class ConsumerTrackingPipelineVisitorTest implements Serializable {
PCollection<String> transformed =
created.apply(
ParDo.of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c)
+ public void processElement(OldDoFn<String, String>.ProcessContext c)
throws Exception {
c.output(Integer.toString(c.element().length()));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
index 09707bd..29dea32 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java
@@ -32,9 +32,9 @@ import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -159,7 +159,7 @@ public class DirectRunnerTest implements Serializable {
@Test
public void transformDisplayDataExceptionShouldFail() {
- DoFn<Integer, Integer> brokenDoFn = new DoFn<Integer, Integer>() {
+ OldDoFn<Integer, Integer> brokenDoFn = new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) throws Exception {}
@@ -211,7 +211,7 @@ public class DirectRunnerTest implements Serializable {
/**
- * Tests that a {@link DoFn} that mutates an output with a good equals() fails in the
+ * Tests that a {@link OldDoFn} that mutates an output with a good equals() fails in the
* {@link DirectRunner}.
*/
@Test
@@ -220,7 +220,7 @@ public class DirectRunnerTest implements Serializable {
pipeline
.apply(Create.of(42))
- .apply(ParDo.of(new DoFn<Integer, List<Integer>>() {
+ .apply(ParDo.of(new OldDoFn<Integer, List<Integer>>() {
@Override public void processElement(ProcessContext c) {
List<Integer> outputList = Arrays.asList(1, 2, 3, 4);
c.output(outputList);
@@ -236,7 +236,7 @@ public class DirectRunnerTest implements Serializable {
}
/**
- * Tests that a {@link DoFn} that mutates an output with a good equals() fails in the
+ * Tests that a {@link OldDoFn} that mutates an output with a good equals() fails in the
* {@link DirectRunner}.
*/
@Test
@@ -245,7 +245,7 @@ public class DirectRunnerTest implements Serializable {
pipeline
.apply(Create.of(42))
- .apply(ParDo.of(new DoFn<Integer, List<Integer>>() {
+ .apply(ParDo.of(new OldDoFn<Integer, List<Integer>>() {
@Override public void processElement(ProcessContext c) {
List<Integer> outputList = Arrays.asList(1, 2, 3, 4);
c.output(outputList);
@@ -260,7 +260,7 @@ public class DirectRunnerTest implements Serializable {
}
/**
- * Tests that a {@link DoFn} that mutates an output with a bad equals() still fails
+ * Tests that a {@link OldDoFn} that mutates an output with a bad equals() still fails
* in the {@link DirectRunner}.
*/
@Test
@@ -269,7 +269,7 @@ public class DirectRunnerTest implements Serializable {
pipeline
.apply(Create.of(42))
- .apply(ParDo.of(new DoFn<Integer, byte[]>() {
+ .apply(ParDo.of(new OldDoFn<Integer, byte[]>() {
@Override public void processElement(ProcessContext c) {
byte[] outputArray = new byte[]{0x1, 0x2, 0x3};
c.output(outputArray);
@@ -285,7 +285,7 @@ public class DirectRunnerTest implements Serializable {
}
/**
- * Tests that a {@link DoFn} that mutates its input with a good equals() fails in the
+ * Tests that a {@link OldDoFn} that mutates its input with a good equals() fails in the
* {@link DirectRunner}.
*/
@Test
@@ -295,7 +295,7 @@ public class DirectRunnerTest implements Serializable {
pipeline
.apply(Create.of(Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6))
.withCoder(ListCoder.of(VarIntCoder.of())))
- .apply(ParDo.of(new DoFn<List<Integer>, Integer>() {
+ .apply(ParDo.of(new OldDoFn<List<Integer>, Integer>() {
@Override public void processElement(ProcessContext c) {
List<Integer> inputList = c.element();
inputList.set(0, 37);
@@ -310,7 +310,7 @@ public class DirectRunnerTest implements Serializable {
}
/**
- * Tests that a {@link DoFn} that mutates an input with a bad equals() still fails
+ * Tests that a {@link OldDoFn} that mutates an input with a bad equals() still fails
* in the {@link DirectRunner}.
*/
@Test
@@ -319,7 +319,7 @@ public class DirectRunnerTest implements Serializable {
pipeline
.apply(Create.of(new byte[]{0x1, 0x2, 0x3}, new byte[]{0x4, 0x5, 0x6}))
- .apply(ParDo.of(new DoFn<byte[], Integer>() {
+ .apply(ParDo.of(new OldDoFn<byte[], Integer>() {
@Override public void processElement(ProcessContext c) {
byte[] inputArray = c.element();
inputArray[0] = 0xa;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
index d40cf93..db934e5 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -213,9 +213,9 @@ public class ImmutabilityCheckingBundleFactoryTest {
CommittedBundle<byte[]> committed = intermediate.commit(Instant.now());
}
- private static class IdentityDoFn<T> extends DoFn<T, T> {
+ private static class IdentityDoFn<T> extends OldDoFn<T, T> {
@Override
- public void processElement(DoFn<T, T>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<T, T>.ProcessContext c) throws Exception {
c.output(c.element());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
index 890e06d..e1be120 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.IllegalMutationException;
import org.apache.beam.sdk.util.WindowedValue;
@@ -59,9 +59,9 @@ public class ImmutabilityEnforcementFactoryTest implements Serializable {
p.apply(Create.of("foo".getBytes(), "spamhameggs".getBytes()))
.apply(
ParDo.of(
- new DoFn<byte[], byte[]>() {
+ new OldDoFn<byte[], byte[]>() {
@Override
- public void processElement(DoFn<byte[], byte[]>.ProcessContext c)
+ public void processElement(OldDoFn<byte[], byte[]>.ProcessContext c)
throws Exception {
c.element()[0] = 'b';
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
index aa0d976..9e273ad 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java
@@ -28,9 +28,9 @@ import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -180,9 +180,9 @@ public class KeyedPValueTrackingVisitorTest {
}
}
- private static class IdentityFn<K> extends DoFn<K, K> {
+ private static class IdentityFn<K> extends OldDoFn<K, K> {
@Override
- public void processElement(DoFn<K, K>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<K, K>.ProcessContext c) throws Exception {
c.output(c.element());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
index 07f478d..3208841 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
@@ -30,7 +30,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -169,7 +169,7 @@ public class ParDoEvaluatorTest {
ImmutableMap.<TupleTag<?>, PCollection<?>>of(mainOutputTag, output));
}
- private static class RecorderFn extends DoFn<Integer, Integer> {
+ private static class RecorderFn extends OldDoFn<Integer, Integer> {
private Collection<Integer> processed;
private final PCollectionView<Integer> view;
@@ -179,7 +179,7 @@ public class ParDoEvaluatorTest {
}
@Override
- public void processElement(DoFn<Integer, Integer>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<Integer, Integer>.ProcessContext c) throws Exception {
processed.add(c.element());
c.output(c.element() + c.sideInput(view));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
index c0ab4df..19094cb 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.ParDo.BoundMulti;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -80,7 +80,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
BoundMulti<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.<String, Integer>of(c.element(), c.element().length()));
@@ -170,7 +170,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
BoundMulti<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.<String, Integer>of(c.element(), c.element().length()));
@@ -254,7 +254,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
StateNamespaces.window(GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE);
BoundMulti<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.windowingInternals()
@@ -354,7 +354,7 @@ public class ParDoMultiEvaluatorFactoryTest implements Serializable {
BoundMulti<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.windowingInternals().stateInternals();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
index d778da6..a4fd570 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
@@ -73,7 +73,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
PCollection<Integer> collection =
input.apply(
ParDo.of(
- new DoFn<String, Integer>() {
+ new OldDoFn<String, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().length());
@@ -127,7 +127,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
PCollection<Integer> collection =
input.apply(
ParDo.of(
- new DoFn<String, Integer>() {
+ new OldDoFn<String, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.sideOutput(sideOutputTag, c.element().length());
@@ -179,7 +179,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
StateNamespaces.window(GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE);
ParDo.Bound<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.windowingInternals()
@@ -265,7 +265,7 @@ public class ParDoSingleEvaluatorFactoryTest implements Serializable {
ParDo.Bound<String, KV<String, Integer>> pardo =
ParDo.of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.windowingInternals().stateInternals();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
index 7c7005c..22f148a 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java
@@ -38,9 +38,9 @@ import org.apache.beam.sdk.coders.VarLongCoder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -76,7 +76,6 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
-
import javax.annotation.Nullable;
/**
@@ -105,9 +104,9 @@ public class WatermarkManagerTest implements Serializable {
createdInts = p.apply("createdInts", Create.of(1, 2, 3));
filtered = createdInts.apply("filtered", Filter.greaterThan(1));
- filteredTimesTwo = filtered.apply("timesTwo", ParDo.of(new DoFn<Integer, Integer>() {
+ filteredTimesTwo = filtered.apply("timesTwo", ParDo.of(new OldDoFn<Integer, Integer>() {
@Override
- public void processElement(DoFn<Integer, Integer>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<Integer, Integer>.ProcessContext c) throws Exception {
c.output(c.element() * 2);
}
}));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
index 56737a4..716c8ad 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java
@@ -32,7 +32,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.Keys;
import org.apache.beam.sdk.transforms.PTransform;
@@ -230,7 +230,7 @@ public class TFIDF {
// Create a collection of pairs mapping a URI to each
// of the words in the document associated with that that URI.
PCollection<KV<URI, String>> uriToWords = uriToContent
- .apply("SplitWords", ParDo.of(new DoFn<KV<URI, String>, KV<URI, String>>() {
+ .apply("SplitWords", ParDo.of(new OldDoFn<KV<URI, String>, KV<URI, String>>() {
private static final long serialVersionUID = 0;
@Override
@@ -275,7 +275,7 @@ public class TFIDF {
// by the URI key.
PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
.apply("ShiftKeys", ParDo.of(
- new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+ new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
private static final long serialVersionUID = 0;
@Override
@@ -316,7 +316,7 @@ public class TFIDF {
// divided by the total number of words in the document.
PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
.apply("ComputeTermFrequencies", ParDo.of(
- new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
private static final long serialVersionUID = 0;
@Override
@@ -339,11 +339,11 @@ public class TFIDF {
// documents in which the word appears divided by the total
// number of documents in the corpus. Note how the total number of
// documents is passed as a side input; the same value is
- // presented to each invocation of the DoFn.
+ // presented to each invocation of the OldDoFn.
PCollection<KV<String, Double>> wordToDf = wordToDocCount
.apply("ComputeDocFrequencies", ParDo
.withSideInputs(totalDocuments)
- .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+ .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
private static final long serialVersionUID = 0;
@Override
@@ -375,7 +375,7 @@ public class TFIDF {
return wordToUriAndTfAndDf
.apply("ComputeTfIdf", ParDo.of(
- new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
private static final long serialVersionUID = 0;
@Override
@@ -416,7 +416,7 @@ public class TFIDF {
@Override
public PDone apply(PCollection<KV<String, KV<URI, Double>>> wordToUriAndTfIdf) {
return wordToUriAndTfIdf
- .apply("Format", ParDo.of(new DoFn<KV<String, KV<URI, Double>>, String>() {
+ .apply("Format", ParDo.of(new OldDoFn<KV<String, KV<URI, Double>>, String>() {
private static final long serialVersionUID = 0;
@Override
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
index c54229d..080cdc9 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
@@ -38,7 +38,7 @@ import org.apache.beam.sdk.values.PCollection;
public class WordCount {
- public static class ExtractWordsFn extends DoFn<String, String> {
+ public static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
index c0ff85d..068404a 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java
@@ -29,7 +29,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.PTransform;
@@ -92,7 +92,7 @@ public class AutoComplete {
// Map the KV outputs of Count into our own CompletionCandiate class.
.apply("CreateCompletionCandidates", ParDo.of(
- new DoFn<KV<String, Long>, CompletionCandidate>() {
+ new OldDoFn<KV<String, Long>, CompletionCandidate>() {
private static final long serialVersionUID = 0;
@Override
@@ -182,7 +182,7 @@ public class AutoComplete {
}
private static class FlattenTops
- extends DoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
+ extends OldDoFn<KV<String, List<CompletionCandidate>>, CompletionCandidate> {
private static final long serialVersionUID = 0;
@Override
@@ -236,10 +236,10 @@ public class AutoComplete {
}
/**
- * A DoFn that keys each candidate by all its prefixes.
+ * A OldDoFn that keys each candidate by all its prefixes.
*/
private static class AllPrefixes
- extends DoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
+ extends OldDoFn<CompletionCandidate, KV<String, CompletionCandidate>> {
private static final long serialVersionUID = 0;
private final int minPrefix;
@@ -314,7 +314,7 @@ public class AutoComplete {
}
}
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
@@ -340,8 +340,8 @@ public class AutoComplete {
* Takes as input a the top candidates per prefix, and emits an entity
* suitable for writing to Datastore.
*/
- static class FormatForPerTaskLocalFile extends DoFn<KV<String, List<CompletionCandidate>>, String>
- implements DoFn.RequiresWindowAccess{
+ static class FormatForPerTaskLocalFile extends OldDoFn<KV<String, List<CompletionCandidate>>, String>
+ implements OldDoFn.RequiresWindowAccess{
private static final long serialVersionUID = 0;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
index f456b27..7d7c0c7 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.join.CoGbkResult;
import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -76,7 +76,7 @@ public class JoinExamples {
// country code 'key' -> string of <event info>, <country name>
PCollection<KV<String, String>> finalResultCollection =
kvpCollection.apply("Process", ParDo.of(
- new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
+ new OldDoFn<KV<String, CoGbkResult>, KV<String, String>>() {
private static final long serialVersionUID = 0;
@Override
@@ -98,7 +98,7 @@ public class JoinExamples {
}));
return finalResultCollection
- .apply("Format", ParDo.of(new DoFn<KV<String, String>, String>() {
+ .apply("Format", ParDo.of(new OldDoFn<KV<String, String>, String>() {
private static final long serialVersionUID = 0;
@Override
@@ -110,7 +110,7 @@ public class JoinExamples {
}));
}
- static class ExtractEventDataFn extends DoFn<String, KV<String, String>> {
+ static class ExtractEventDataFn extends OldDoFn<String, KV<String, String>> {
private static final long serialVersionUID = 0;
@Override
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
index 8756abe..395b409 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
@@ -30,7 +30,7 @@ import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -326,7 +326,7 @@ public class KafkaIOExamples {
* Print contents to stdout
* @param <T> type of the input
*/
- private static class PrintFn<T> extends DoFn<T, T> {
+ private static class PrintFn<T> extends OldDoFn<T, T> {
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
index 4e81420..8c31783 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -49,7 +49,7 @@ public class KafkaWindowedWordCountExample {
static final String GROUP_ID = "myGroup"; // Default groupId
static final String ZOOKEEPER = "localhost:2181"; // Default zookeeper to connect to for Kafka
- public static class ExtractWordsFn extends DoFn<String, String> {
+ public static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
@@ -71,7 +71,7 @@ public class KafkaWindowedWordCountExample {
}
}
- public static class FormatAsStringFn extends DoFn<KV<String, Long>, String> {
+ public static class FormatAsStringFn extends OldDoFn<KV<String, Long>, String> {
@Override
public void processElement(ProcessContext c) {
String row = c.element().getKey() + " - " + c.element().getValue() + " @ " + c.timestamp().toString();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
index 1b532a7..d149e4e 100644
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
+++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -59,7 +59,7 @@ public class WindowedWordCount {
static final long WINDOW_SIZE = 10; // Default window duration in seconds
static final long SLIDE_SIZE = 5; // Default window slide in seconds
- static class FormatAsStringFn extends DoFn<KV<String, Long>, String> {
+ static class FormatAsStringFn extends OldDoFn<KV<String, Long>, String> {
@Override
public void processElement(ProcessContext c) {
String row = c.element().getKey() + " - " + c.element().getValue() + " @ " + c.timestamp().toString();
@@ -67,7 +67,7 @@ public class WindowedWordCount {
}
}
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
index 0bba0d0..01a3ab2 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java
@@ -39,7 +39,7 @@ import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
@@ -391,7 +391,7 @@ class FlinkBatchTransformTranslators {
inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));
// construct a map from side input to WindowingStrategy so that
- // the DoFn runner can map main-input windows to side input windows
+ // the OldDoFn runner can map main-input windows to side input windows
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
for (PCollectionView<?> sideInput: transform.getSideInputs()) {
sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
@@ -494,7 +494,7 @@ class FlinkBatchTransformTranslators {
DataSet<WindowedValue<InputT>> inputDataSet =
context.getInputDataSet(context.getInput(transform));
- final DoFn<InputT, OutputT> doFn = transform.getFn();
+ final OldDoFn<InputT, OutputT> doFn = transform.getFn();
TypeInformation<WindowedValue<OutputT>> typeInformation =
context.getTypeInfo(context.getOutput(transform));
@@ -502,7 +502,7 @@ class FlinkBatchTransformTranslators {
List<PCollectionView<?>> sideInputs = transform.getSideInputs();
// construct a map from side input to WindowingStrategy so that
- // the DoFn runner can map main-input windows to side input windows
+ // the OldDoFn runner can map main-input windows to side input windows
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
for (PCollectionView<?> sideInput: sideInputs) {
sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
@@ -539,7 +539,7 @@ class FlinkBatchTransformTranslators {
DataSet<WindowedValue<InputT>> inputDataSet =
context.getInputDataSet(context.getInput(transform));
- final DoFn<InputT, OutputT> doFn = transform.getFn();
+ final OldDoFn<InputT, OutputT> doFn = transform.getFn();
Map<TupleTag<?>, PCollection<?>> outputs = context.getOutput(transform).getAll();
@@ -578,7 +578,7 @@ class FlinkBatchTransformTranslators {
List<PCollectionView<?>> sideInputs = transform.getSideInputs();
// construct a map from side input to WindowingStrategy so that
- // the DoFn runner can map main-input windows to side input windows
+ // the OldDoFn runner can map main-input windows to side input windows
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
for (PCollectionView<?> sideInput: sideInputs) {
sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
index fa6b387..5b55d42 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java
@@ -35,11 +35,10 @@ import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.Sink;
import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.Write;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
@@ -71,8 +70,6 @@ import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
-import org.apache.flink.streaming.api.functions.IngestionTimeExtractor;
-import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.util.Collector;
import org.joda.time.Instant;
import org.slf4j.Logger;
@@ -346,8 +343,8 @@ public class FlinkStreamingTransformTranslators {
context.setOutputDataStream(context.getOutput(transform), windowedStream);
}
- private static <T, W extends BoundedWindow> DoFn<T, T> createWindowAssigner(final WindowFn<T, W> windowFn) {
- return new DoFn<T, T>() {
+ private static <T, W extends BoundedWindow> OldDoFn<T, T> createWindowAssigner(final WindowFn<T, W> windowFn) {
+ return new OldDoFn<T, T>() {
@Override
public void processElement(final ProcessContext c) throws Exception {
[17/19] incubator-beam git commit: Update Dataflow runner to worker
container supporting OldDoFn
Posted by dh...@apache.org.
Update Dataflow runner to worker container supporting OldDoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/3466a0e7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/3466a0e7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/3466a0e7
Branch: refs/heads/master
Commit: 3466a0e72d1f025c2d4562635a20730778b497b0
Parents: e07c339
Author: Kenneth Knowles <kl...@google.com>
Authored: Wed Aug 3 17:47:23 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../java/org/apache/beam/runners/dataflow/DataflowRunner.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/3466a0e7/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index d762d50..abcf415 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -212,9 +212,9 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
// Default Docker container images that execute Dataflow worker harness, residing in Google
// Container Registry, separately for Batch and Streaming.
public static final String BATCH_WORKER_HARNESS_CONTAINER_IMAGE =
- "dataflow.gcr.io/v1beta3/beam-java-batch:beam-master-20160714";
+ "dataflow.gcr.io/v1beta3/beam-java-batch:beam-master-20160804-dofn";
public static final String STREAMING_WORKER_HARNESS_CONTAINER_IMAGE =
- "dataflow.gcr.io/v1beta3/beam-java-streaming:beam-master-20160714";
+ "dataflow.gcr.io/v1beta3/beam-java-streaming:beam-master-20160804-dofn";
// The limit of CreateJob request size.
private static final int CREATE_JOB_REQUEST_LIMIT_BYTES = 10 * 1024 * 1024;
[14/19] incubator-beam git commit: Port MinimalWordCount example from
OldDoFn to DoFn
Posted by dh...@apache.org.
Port MinimalWordCount example from OldDoFn to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/4ceec0e8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/4ceec0e8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/4ceec0e8
Branch: refs/heads/master
Commit: 4ceec0e86f1c4e885168957299dbe81c61fbc7e7
Parents: 64481d0
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:28:42 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../java/org/apache/beam/examples/MinimalWordCount.java | 9 ++++-----
1 file changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4ceec0e8/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
index ab0bb6d..df725e3 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/MinimalWordCount.java
@@ -22,8 +22,8 @@ import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Count;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.values.KV;
@@ -89,12 +89,11 @@ public class MinimalWordCount {
// the input text (a set of Shakespeare's texts).
p.apply(TextIO.Read.from("gs://dataflow-samples/shakespeare/*"))
// Concept #2: Apply a ParDo transform to our PCollection of text lines. This ParDo invokes a
- // OldDoFn (defined in-line) on each element that tokenizes the text line into individua
- // words.
+ // DoFn (defined in-line) on each element that tokenizes the text line into individual words.
// The ParDo returns a PCollection<String>, where each element is an individual word in
// Shakespeare's collected texts.
- .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
- @Override
+ .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+ @ProcessElement
public void processElement(ProcessContext c) {
for (String word : c.element().split("[^a-zA-Z']+")) {
if (!word.isEmpty()) {
[07/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
index 77c857c..7917aec 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java
@@ -23,8 +23,8 @@ import static com.google.common.base.Preconditions.checkNotNull;
import org.apache.beam.sdk.coders.AtomicCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.VarInt;
import com.google.common.base.MoreObjects;
@@ -38,8 +38,8 @@ import java.util.Objects;
/**
* Provides information about the pane an element belongs to. Every pane is implicitly associated
* with a window. Panes are observable only via the
- * {@link org.apache.beam.sdk.transforms.DoFn.ProcessContext#pane} method of the context
- * passed to a {@link DoFn#processElement} overridden method.
+ * {@link OldDoFn.ProcessContext#pane} method of the context
+ * passed to a {@link OldDoFn#processElement} overridden method.
*
* <p>Note: This does not uniquely identify a pane, and should not be used for comparisons.
*/
@@ -74,8 +74,8 @@ public final class PaneInfo {
* definitions:
* <ol>
* <li>We'll call a pipeline 'simple' if it does not use
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#outputWithTimestamp} in
- * any {@code DoFn}, and it uses the same
+ * {@link OldDoFn.Context#outputWithTimestamp} in
+ * any {@code OldDoFn}, and it uses the same
* {@link org.apache.beam.sdk.transforms.windowing.Window.Bound#withAllowedLateness}
* argument value on all windows (or uses the default of {@link org.joda.time.Duration#ZERO}).
* <li>We'll call an element 'locally late', from the point of view of a computation on a
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
index fe8b66f..03ff481 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java
@@ -21,8 +21,8 @@ import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.Coder.NonDeterministicException;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -645,7 +645,7 @@ public class Window {
// We first apply a (trivial) transform to the input PCollection to produce a new
// PCollection. This ensures that we don't modify the windowing strategy of the input
// which may be used elsewhere.
- .apply("Identity", ParDo.of(new DoFn<T, T>() {
+ .apply("Identity", ParDo.of(new OldDoFn<T, T>() {
@Override public void processElement(ProcessContext c) {
c.output(c.element());
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
index a62444f..dd36367 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java
@@ -107,7 +107,7 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+ * {@link OldDoFn.Context#output}
* is called.
*/
@Override
@@ -115,7 +115,7 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+ * {@link OldDoFn.Context#sideOutput}
* is called.
*/
@Override
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
index ce35c24..e14aec8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java
@@ -21,6 +21,7 @@ package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkState;
import org.apache.beam.sdk.transforms.Combine;
+
import java.util.HashMap;
import java.util.Map;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
index f73fae3..149d276 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineContextFactory.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineWithContext.Context;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.state.StateContext;
import org.apache.beam.sdk.values.PCollectionView;
@@ -49,9 +49,9 @@ public class CombineContextFactory {
}
/**
- * Returns a {@code Combine.Context} that wraps a {@code DoFn.ProcessContext}.
+ * Returns a {@code Combine.Context} that wraps a {@code OldDoFn.ProcessContext}.
*/
- public static Context createFromProcessContext(final DoFn<?, ?>.ProcessContext c) {
+ public static Context createFromProcessContext(final OldDoFn<?, ?>.ProcessContext c) {
return new Context() {
@Override
public PipelineOptions getPipelineOptions() {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
index 01bde82..1c2f554 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java
@@ -42,14 +42,14 @@ public interface ExecutionContext {
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+ * {@link OldDoFn.Context#output}
* is called.
*/
void noteOutput(WindowedValue<?> output);
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+ * {@link OldDoFn.Context#sideOutput}
* is called.
*/
void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);
@@ -71,14 +71,14 @@ public interface ExecutionContext {
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
+ * {@link OldDoFn.Context#output}
* is called.
*/
void noteOutput(WindowedValue<?> output);
/**
* Hook for subclasses to implement that will be called whenever
- * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+ * {@link OldDoFn.Context#sideOutput}
* is called.
*/
void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
index 96802ae..eb0a91a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java
@@ -21,6 +21,7 @@ package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkArgument;
import org.apache.beam.sdk.transforms.Combine;
+
import java.util.Arrays;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
index 9dc4f68..ae3d391 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java
@@ -19,7 +19,7 @@ package org.apache.beam.sdk.util;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import java.io.Serializable;
@@ -43,62 +43,62 @@ public interface PerKeyCombineFnRunner<K, InputT, AccumT, OutputT> extends Seria
/////////////////////////////////////////////////////////////////////////////
/**
- * Forwards the call to a {@link PerKeyCombineFn} to create the accumulator in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to create the accumulator in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c);
+ public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c);
/**
- * Forwards the call to a {@link PerKeyCombineFn} to add the input in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to add the input in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public AccumT addInput(K key, AccumT accumulator, InputT input, DoFn<?, ?>.ProcessContext c);
+ public AccumT addInput(K key, AccumT accumulator, InputT input, OldDoFn<?, ?>.ProcessContext c);
/**
- * Forwards the call to a {@link PerKeyCombineFn} to merge accumulators in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to merge accumulators in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
public AccumT mergeAccumulators(
- K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c);
+ K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c);
/**
- * Forwards the call to a {@link PerKeyCombineFn} to extract the output in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to extract the output in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c);
+ public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c);
/**
- * Forwards the call to a {@link PerKeyCombineFn} to compact the accumulator in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to compact the accumulator in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c);
+ public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c);
/**
* Forwards the call to a {@link PerKeyCombineFn} to combine the inputs and extract output
- * in a {@link DoFn}.
+ * in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c);
+ public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c);
/**
- * Forwards the call to a {@link PerKeyCombineFn} to add all inputs in a {@link DoFn}.
+ * Forwards the call to a {@link PerKeyCombineFn} to add all inputs in a {@link OldDoFn}.
*
- * <p>It constructs a {@code CombineWithContext.Context} from {@code DoFn.ProcessContext}
+ * <p>It constructs a {@code CombineWithContext.Context} from {@code OldDoFn.ProcessContext}
* if it is required.
*/
- public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c);
+ public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c);
/////////////////////////////////////////////////////////////////////////////
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
index 2d28682..87870a8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn;
import org.apache.beam.sdk.transforms.CombineWithContext;
import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext;
import org.apache.beam.sdk.transforms.CombineWithContext.RequiresContextInternal;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import com.google.common.collect.Iterables;
@@ -69,39 +69,39 @@ public class PerKeyCombineFnRunners {
}
@Override
- public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c) {
+ public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.createAccumulator(key);
}
@Override
public AccumT addInput(
- K key, AccumT accumulator, InputT input, DoFn<?, ?>.ProcessContext c) {
+ K key, AccumT accumulator, InputT input, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.addInput(key, accumulator, input);
}
@Override
public AccumT mergeAccumulators(
- K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c) {
+ K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.mergeAccumulators(key, accumulators);
}
@Override
- public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+ public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.extractOutput(key, accumulator);
}
@Override
- public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+ public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.compact(key, accumulator);
}
@Override
- public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+ public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFn.apply(key, inputs);
}
@Override
- public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+ public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
AccumT accum = keyedCombineFn.createAccumulator(key);
for (InputT input : inputs) {
accum = keyedCombineFn.addInput(key, accum, input);
@@ -165,45 +165,45 @@ public class PerKeyCombineFnRunners {
}
@Override
- public AccumT createAccumulator(K key, DoFn<?, ?>.ProcessContext c) {
+ public AccumT createAccumulator(K key, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.createAccumulator(key,
CombineContextFactory.createFromProcessContext(c));
}
@Override
public AccumT addInput(
- K key, AccumT accumulator, InputT value, DoFn<?, ?>.ProcessContext c) {
+ K key, AccumT accumulator, InputT value, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.addInput(key, accumulator, value,
CombineContextFactory.createFromProcessContext(c));
}
@Override
public AccumT mergeAccumulators(
- K key, Iterable<AccumT> accumulators, DoFn<?, ?>.ProcessContext c) {
+ K key, Iterable<AccumT> accumulators, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.mergeAccumulators(
key, accumulators, CombineContextFactory.createFromProcessContext(c));
}
@Override
- public OutputT extractOutput(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+ public OutputT extractOutput(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.extractOutput(key, accumulator,
CombineContextFactory.createFromProcessContext(c));
}
@Override
- public AccumT compact(K key, AccumT accumulator, DoFn<?, ?>.ProcessContext c) {
+ public AccumT compact(K key, AccumT accumulator, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.compact(key, accumulator,
CombineContextFactory.createFromProcessContext(c));
}
@Override
- public OutputT apply(K key, Iterable<? extends InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+ public OutputT apply(K key, Iterable<? extends InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
return keyedCombineFnWithContext.apply(key, inputs,
CombineContextFactory.createFromProcessContext(c));
}
@Override
- public AccumT addInputs(K key, Iterable<InputT> inputs, DoFn<?, ?>.ProcessContext c) {
+ public AccumT addInputs(K key, Iterable<InputT> inputs, OldDoFn<?, ?>.ProcessContext c) {
CombineWithContext.Context combineContext = CombineContextFactory.createFromProcessContext(c);
AccumT accum = keyedCombineFnWithContext.createAccumulator(key, combineContext);
for (InputT input : inputs) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
index 36c4a9f..9e6c7d2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java
@@ -34,6 +34,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
+
import javax.annotation.Nullable;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
index 9fa0380..88ae6cc 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java
@@ -34,6 +34,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+
import javax.annotation.Nullable;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
index c2273f5..2808ca9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReifyTimestampAndWindowsDoFn.java
@@ -17,11 +17,11 @@
*/
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.values.KV;
/**
- * DoFn that makes timestamps and window assignments explicit in the value part of each key/value
+ * OldDoFn that makes timestamps and window assignments explicit in the value part of each key/value
* pair.
*
* @param <K> the type of the keys of the input and output {@code PCollection}s
@@ -29,7 +29,7 @@ import org.apache.beam.sdk.values.KV;
*/
@SystemDoFnInternal
public class ReifyTimestampAndWindowsDoFn<K, V>
- extends DoFn<KV<K, V>, KV<K, WindowedValue<V>>> {
+ extends OldDoFn<KV<K, V>, KV<K, WindowedValue<V>>> {
@Override
public void processElement(ProcessContext c)
throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
index 6c58689..66c7cc0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java
@@ -17,8 +17,8 @@
*/
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -70,7 +70,7 @@ public class Reshuffle<K, V> extends PTransform<PCollection<KV<K, V>>, PCollecti
// set allowed lateness.
.setWindowingStrategyInternal(originalStrategy)
.apply("ExpandIterable", ParDo.of(
- new DoFn<KV<K, Iterable<V>>, KV<K, V>>() {
+ new OldDoFn<KV<K, Iterable<V>>, KV<K, V>>() {
@Override
public void processElement(ProcessContext c) {
K key = c.element().getKey();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
index 45f6c4a..1e70aaf 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java
@@ -105,7 +105,7 @@ public class SerializableUtils {
*/
public static CloudObject ensureSerializable(Coder<?> coder) {
// Make sure that Coders are java serializable as well since
- // they are regularly captured within DoFn's.
+ // they are regularly captured within OldDoFn's.
Coder<?> copy = (Coder<?>) ensureSerializable((Serializable) coder);
CloudObject cloudObject = copy.asCloudObject();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
index 53201a4..bb59373 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java
@@ -98,7 +98,7 @@ public class StringUtils {
}
private static final String[] STANDARD_NAME_SUFFIXES =
- new String[]{"DoFn", "Fn"};
+ new String[]{"OldDoFn", "Fn"};
/**
* Pattern to match a non-anonymous inner class.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
index 9a42b23..b8a5cd4 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java
@@ -17,7 +17,7 @@
*/
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
@@ -26,10 +26,10 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
- * Annotation to mark {@link DoFn DoFns} as an internal component of the Dataflow SDK.
+ * Annotation to mark {@link OldDoFn DoFns} as an internal component of the Dataflow SDK.
*
* <p>Currently, the only effect of this is to mark any aggregators reported by an annotated
- * {@code DoFn} as a system counter (as opposed to a user counter).
+ * {@code OldDoFn} as a system counter (as opposed to a user counter).
*
* <p>This is internal to the Dataflow SDK.
*/
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
index c03ab4d..3212d64 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java
@@ -33,7 +33,6 @@ import com.google.common.base.MoreObjects;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.joda.time.Instant;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
index e724349..f0e4812 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java
@@ -22,7 +22,7 @@ import static com.google.common.base.Preconditions.checkArgument;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.StandardCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import com.google.common.base.MoreObjects;
@@ -139,8 +139,8 @@ public class ValueWithRecordId<ValueT> {
ByteArrayCoder idCoder;
}
- /** {@link DoFn} to turn a {@code ValueWithRecordId<T>} back to the value {@code T}. */
- public static class StripIdsDoFn<T> extends DoFn<ValueWithRecordId<T>, T> {
+ /** {@link OldDoFn} to turn a {@code ValueWithRecordId<T>} back to the value {@code T}. */
+ public static class StripIdsDoFn<T> extends OldDoFn<ValueWithRecordId<T>, T> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getValue());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
index 676848c..9d341a1 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java
@@ -38,7 +38,6 @@ import com.google.common.collect.ImmutableList;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.joda.time.Instant;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
index 149c497..3a1b654 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java
@@ -30,8 +30,8 @@ import java.io.IOException;
import java.util.Collection;
/**
- * Interface that may be required by some (internal) {@code DoFn}s to implement windowing. It should
- * not be necessary for general user code to interact with this at all.
+ * Interface that may be required by some (internal) {@code OldDoFn}s to implement windowing. It
+ * should not be necessary for general user code to interact with this at all.
*
* <p>This interface should be provided by runner implementors to support windowing on their runner.
*
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
index 75b8ad8..6db532e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.util.common;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
+
import static java.util.Arrays.asList;
import com.google.common.base.Function;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
index b60a53e..69bf77d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java
@@ -27,7 +27,6 @@ import org.apache.beam.sdk.util.PropertyNames;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.joda.time.Instant;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
index 8abfb05..5137031 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.UserCodeException;
@@ -146,9 +146,9 @@ public class PipelineTest {
private static PTransform<PCollection<? extends String>, PCollection<String>> addSuffix(
final String suffix) {
- return ParDo.of(new DoFn<String, String>() {
+ return ParDo.of(new OldDoFn<String, String>() {
@Override
- public void processElement(DoFn<String, String>.ProcessContext c) {
+ public void processElement(OldDoFn<String, String>.ProcessContext c) {
c.output(c.element() + suffix);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
index 54f7ec1..41d0932 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.CloudObject;
import org.apache.beam.sdk.util.SerializableUtils;
@@ -134,7 +134,7 @@ public class AvroCoderTest {
}
}
- private static class GetTextFn extends DoFn<Pojo, String> {
+ private static class GetTextFn extends OldDoFn<Pojo, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().text);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
index 817ea20..35ec6c6 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.coders.protobuf.ProtoCoder;
import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.CloudObject;
@@ -366,7 +366,7 @@ public class CoderRegistryTest {
private static class PTransformOutputingMySerializableGeneric
extends PTransform<PCollection<String>, PCollection<KV<String, MySerializableGeneric<String>>>> {
- private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<String>>> {
+ private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<String>>> {
@Override
public void processElement(ProcessContext c) { }
}
@@ -430,7 +430,7 @@ public class CoderRegistryTest {
PCollection<String>,
PCollection<KV<String, MySerializableGeneric<T>>>> {
- private class OutputDoFn extends DoFn<String, KV<String, MySerializableGeneric<T>>> {
+ private class OutputDoFn extends OldDoFn<String, KV<String, MySerializableGeneric<T>>> {
@Override
public void processElement(ProcessContext c) { }
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
index d6423e5..3e7fd50 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.CloudObject;
import org.apache.beam.sdk.util.CoderUtils;
@@ -82,14 +82,14 @@ public class SerializableCoderTest implements Serializable {
}
}
- static class StringToRecord extends DoFn<String, MyRecord> {
+ static class StringToRecord extends OldDoFn<String, MyRecord> {
@Override
public void processElement(ProcessContext c) {
c.output(new MyRecord(c.element()));
}
}
- static class RecordToString extends DoFn<MyRecord, String> {
+ static class RecordToString extends OldDoFn<MyRecord, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().value);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
index c7153f8..09405ab 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
index cabfc21..fe9415b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
index 8fbed94..01e5fe5 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
index c5f7478..95f7454 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
@@ -28,9 +29,9 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Max;
import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -119,7 +120,7 @@ public class CountingInputTest {
assertThat(endTime.isAfter(startTime.plus(expectedRuntimeMillis)), is(true));
}
- private static class ElementValueDiff extends DoFn<Long, Long> {
+ private static class ElementValueDiff extends OldDoFn<Long, Long> {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element() - c.timestamp().getMillis());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
index 321f066..45f636f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java
@@ -34,10 +34,10 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.Max;
import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.RemoveDuplicates;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -159,7 +159,7 @@ public class CountingSourceTest {
p.run();
}
- private static class ElementValueDiff extends DoFn<Long, Long> {
+ private static class ElementValueDiff extends OldDoFn<Long, Long> {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element() - c.timestamp().getMillis());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
index 7009023..f689f51 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionExhaustive;
import static org.apache.beam.sdk.testing.SourceTestUtils.readFromSource;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
index 9c75972..f8592c9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.testing.CoderProperties;
import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.PubsubClient;
import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage;
@@ -58,7 +58,7 @@ public class PubsubUnboundedSinkTest {
private static final String ID_LABEL = "id";
private static final int NUM_SHARDS = 10;
- private static class Stamp extends DoFn<String, String> {
+ private static class Stamp extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
c.outputWithTimestamp(c.element(), new Instant(TIMESTAMP));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
index 237c025..a47ddf2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java
@@ -37,6 +37,7 @@ import org.junit.runners.JUnit4;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
+
import javax.annotation.Nullable;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
index a1f1f70..6ec3a71 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java
@@ -23,6 +23,7 @@ import static org.apache.beam.sdk.TestUtils.NO_INTS_ARRAY;
import static org.apache.beam.sdk.TestUtils.NO_LINES_ARRAY;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
+
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.startsWith;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
index 0af0744..4b6e749 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
@@ -40,9 +41,9 @@ import org.apache.beam.sdk.options.PipelineOptionsFactoryTest.TestPipelineOption
import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -101,14 +102,14 @@ public class WriteTest {
this.window = window;
}
- private static class AddArbitraryKey<T> extends DoFn<T, KV<Integer, T>> {
+ private static class AddArbitraryKey<T> extends OldDoFn<T, KV<Integer, T>> {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(KV.of(ThreadLocalRandom.current().nextInt(), c.element()));
}
}
- private static class RemoveArbitraryKey<T> extends DoFn<KV<Integer, Iterable<T>>, T> {
+ private static class RemoveArbitraryKey<T> extends OldDoFn<KV<Integer, Iterable<T>>, T> {
@Override
public void processElement(ProcessContext c) throws Exception {
for (T s : c.element().getValue()) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
index 98aee4e..ea0db73 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java
@@ -46,6 +46,7 @@ import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
index 22359dc..ec2902e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.options;
import static com.google.common.base.Strings.isNullOrEmpty;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
index 546fe7d..8e1439b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java
@@ -29,7 +29,6 @@ import com.google.api.services.bigquery.Bigquery.Datasets.Delete;
import com.google.api.services.storage.Storage;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
index 8b8337e..0c1b596 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
@@ -43,7 +43,6 @@ import com.google.common.collect.ListMultimap;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
index 687271c..b2efa61 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java
@@ -28,7 +28,6 @@ import com.google.common.collect.Sets;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
index 110f30a..c4c5c1c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasNamespace;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.hasItem;
@@ -43,7 +44,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.hamcrest.Matchers;
import org.joda.time.Instant;
import org.junit.Rule;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
index 74cc5e0..13476e2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/AggregatorPipelineExtractorTest.java
@@ -27,9 +27,9 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.Pipeline.PipelineVisitor;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Max;
import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -211,7 +211,7 @@ public class AggregatorPipelineExtractorTest {
}
}
- private static class AggregatorProvidingDoFn<InT, OuT> extends DoFn<InT, OuT> {
+ private static class AggregatorProvidingDoFn<InT, OuT> extends OldDoFn<InT, OuT> {
public <InputT, OutT> Aggregator<InputT, OutT> addAggregator(
CombineFn<InputT, ?, OutT> combiner) {
return createAggregator(randomName(), combiner);
@@ -222,7 +222,7 @@ public class AggregatorPipelineExtractorTest {
}
@Override
- public void processElement(DoFn<InT, OuT>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<InT, OuT>.ProcessContext c) throws Exception {
fail();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
index 1070dab..acc2b48 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
@@ -39,7 +39,6 @@ import org.apache.beam.sdk.values.TimestampedValue;
import com.google.common.collect.Iterables;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Rule;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
index 043c06c..0bd7893 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java
@@ -31,7 +31,6 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.junit.Rule;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
index 8c2451b..fc10d4b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.TestUtils.checkCombineFn;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
index 1a42947..5c8732f 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -53,7 +54,7 @@ import java.util.List;
*/
@RunWith(JUnit4.class)
public class ApproximateUniqueTest implements Serializable {
- // implements Serializable just to make it easy to use anonymous inner DoFn subclasses
+ // implements Serializable just to make it easy to use anonymous inner OldDoFn subclasses
@Test
public void testEstimationErrorToSampleSize() {
@@ -222,7 +223,7 @@ public class ApproximateUniqueTest implements Serializable {
.apply(View.<Long>asSingleton());
PCollection<KV<Long, Long>> approximateAndExact = approximate
- .apply(ParDo.of(new DoFn<Long, KV<Long, Long>>() {
+ .apply(ParDo.of(new OldDoFn<Long, KV<Long, Long>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(c.element(), c.sideInput(exact)));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
index 486c738..d6bf826 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java
@@ -461,7 +461,7 @@ public class CombineFnsTest {
}
private static class ExtractResultDoFn
- extends DoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>>{
+ extends OldDoFn<KV<String, CoCombineResult>, KV<String, KV<Integer, String>>> {
private final TupleTag<Integer> maxIntTag;
private final TupleTag<UserString> concatStringTag;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
index b453089..cb9928e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java
@@ -25,6 +25,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.include
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+
import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
@@ -73,7 +74,6 @@ import com.google.common.collect.Sets;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.hamcrest.Matchers;
import org.joda.time.Duration;
import org.junit.Test;
@@ -117,7 +117,7 @@ public class CombineTest implements Serializable {
1, 1, 2, 3, 5, 8, 13, 21, 34, 55
};
- @Mock private DoFn<?, ?>.ProcessContext processContext;
+ @Mock private OldDoFn<?, ?>.ProcessContext processContext;
PCollection<KV<String, Integer>> createInput(Pipeline p,
KV<String, Integer>[] table) {
@@ -372,7 +372,7 @@ public class CombineTest implements Serializable {
pipeline.run();
}
- private static class FormatPaneInfo extends DoFn<Integer, String> {
+ private static class FormatPaneInfo extends OldDoFn<Integer, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + ": " + c.pane().isLast());
@@ -560,7 +560,7 @@ public class CombineTest implements Serializable {
pipeline.run();
}
- private static class GetLast extends DoFn<Integer, Integer> {
+ private static class GetLast extends OldDoFn<Integer, Integer> {
@Override
public void processElement(ProcessContext c) {
if (c.pane().isLast()) {
@@ -653,7 +653,7 @@ public class CombineTest implements Serializable {
PCollection<Integer> output = pipeline
.apply("CreateVoidMainInput", Create.of((Void) null))
- .apply("OutputSideInput", ParDo.of(new DoFn<Void, Integer>() {
+ .apply("OutputSideInput", ParDo.of(new OldDoFn<Void, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.sideInput(view));
@@ -1176,7 +1176,7 @@ public class CombineTest implements Serializable {
}
private static <T> PCollection<T> copy(PCollection<T> pc, final int n) {
- return pc.apply(ParDo.of(new DoFn<T, T>() {
+ return pc.apply(ParDo.of(new OldDoFn<T, T>() {
@Override
public void processElement(ProcessContext c) throws Exception {
for (int i = 0; i < n; i++) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
index 07ba002..cf65423 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java
@@ -229,7 +229,7 @@ public class CreateTest {
p.run();
}
- private static class PrintTimestamps extends DoFn<String, String> {
+ private static class PrintTimestamps extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + ":" + c.timestamp().getMillis());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
deleted file mode 100644
index 2e588c7..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnContextTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-/**
- * Tests for {@link DoFn.Context}.
- */
-@RunWith(JUnit4.class)
-public class DoFnContextTest {
-
- @Mock
- private Aggregator<Long, Long> agg;
-
- private DoFn<Object, Object> fn;
- private DoFn<Object, Object>.Context context;
-
- @Before
- public void setup() {
- MockitoAnnotations.initMocks(this);
-
- // Need to be real objects to call the constructor, and to reference the
- // outer instance of DoFn
- NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
- DoFn<Object, Object>.Context noOpContext = noOpFn.context();
-
- fn = spy(noOpFn);
- context = spy(noOpContext);
- }
-
- @Test
- public void testSetupDelegateAggregatorsCreatesAndLinksDelegateAggregators() {
- Sum.SumLongFn combiner = new Sum.SumLongFn();
- Aggregator<Long, Long> delegateAggregator =
- fn.createAggregator("test", combiner);
-
- when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
-
- context.setupDelegateAggregators();
- delegateAggregator.addValue(1L);
-
- verify(agg).addValue(1L);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
index bf9899c..2488042 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java
@@ -24,7 +24,7 @@ import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn.DelegatingAggregator;
+import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
import org.junit.Before;
import org.junit.Rule;
@@ -36,7 +36,7 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
- * Tests for DoFn.DelegatingAggregator.
+ * Tests for OldDoFn.DelegatingAggregator.
*/
@RunWith(JUnit4.class)
public class DoFnDelegatingAggregatorTest {
@@ -54,7 +54,7 @@ public class DoFnDelegatingAggregatorTest {
@Test
public void testAddValueWithoutDelegateThrowsException() {
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
@@ -64,7 +64,7 @@ public class DoFnDelegatingAggregatorTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("cannot be called");
- thrown.expectMessage("DoFn");
+ thrown.expectMessage("OldDoFn");
aggregator.addValue(21.2);
}
@@ -74,7 +74,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Long, ?, Long> combiner = mockCombineFn(Long.class);
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
DelegatingAggregator<Long, Long> aggregator =
(DelegatingAggregator<Long, Long>) doFn.createAggregator(name, combiner);
@@ -91,7 +91,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -114,7 +114,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
@@ -127,7 +127,7 @@ public class DoFnDelegatingAggregatorTest {
String name = "agg";
CombineFn<Double, ?, Double> combiner = mockCombineFn(Double.class);
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
DelegatingAggregator<Double, Double> aggregator =
(DelegatingAggregator<Double, Double>) doFn.createAggregator(name, combiner);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
index 3238f2c..0cb3d7b 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnReflectorTest.java
@@ -158,7 +158,7 @@ public class DoFnReflectorTest {
@Test
public void testDoFnInvokersReused() throws Exception {
- // Ensures that we don't create a new Invoker class for every instance of the DoFn.
+ // Ensures that we don't create a new Invoker class for every instance of the OldDoFn.
IdentityParent fn1 = new IdentityParent();
IdentityParent fn2 = new IdentityParent();
DoFnReflector reflector1 = underTest(fn1);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
deleted file mode 100644
index 9242ece..0000000
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.transforms;
-
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.isA;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertThat;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
-import org.apache.beam.sdk.PipelineResult;
-import org.apache.beam.sdk.runners.AggregatorValues;
-import org.apache.beam.sdk.testing.NeedsRunner;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
-import org.apache.beam.sdk.transforms.Sum.SumIntegerFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-
-import com.google.common.collect.ImmutableMap;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import java.io.Serializable;
-import java.util.Map;
-
-/**
- * Tests for DoFn.
- */
-@RunWith(JUnit4.class)
-public class DoFnTest implements Serializable {
-
- @Rule
- public transient ExpectedException thrown = ExpectedException.none();
-
- @Test
- public void testCreateAggregatorWithCombinerSucceeds() {
- String name = "testAggregator";
- Sum.SumLongFn combiner = new Sum.SumLongFn();
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
-
- assertEquals(name, aggregator.getName());
- assertEquals(combiner, aggregator.getCombineFn());
- }
-
- @Test
- public void testCreateAggregatorWithNullNameThrowsException() {
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("name cannot be null");
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- doFn.createAggregator(null, new Sum.SumLongFn());
- }
-
- @Test
- public void testCreateAggregatorWithNullCombineFnThrowsException() {
- CombineFn<Object, Object, Object> combiner = null;
-
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("combiner cannot be null");
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- doFn.createAggregator("testAggregator", combiner);
- }
-
- @Test
- public void testCreateAggregatorWithNullSerializableFnThrowsException() {
- SerializableFunction<Iterable<Object>, Object> combiner = null;
-
- thrown.expect(NullPointerException.class);
- thrown.expectMessage("combiner cannot be null");
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- doFn.createAggregator("testAggregator", combiner);
- }
-
- @Test
- public void testCreateAggregatorWithSameNameThrowsException() {
- String name = "testAggregator";
- CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- doFn.createAggregator(name, combiner);
-
- thrown.expect(IllegalArgumentException.class);
- thrown.expectMessage("Cannot create");
- thrown.expectMessage(name);
- thrown.expectMessage("already exists");
-
- doFn.createAggregator(name, combiner);
- }
-
- @Test
- public void testCreateAggregatorsWithDifferentNamesSucceeds() {
- String nameOne = "testAggregator";
- String nameTwo = "aggregatorPrime";
- CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
-
- DoFn<Void, Void> doFn = new NoOpDoFn<>();
-
- Aggregator<Double, Double> aggregatorOne =
- doFn.createAggregator(nameOne, combiner);
- Aggregator<Double, Double> aggregatorTwo =
- doFn.createAggregator(nameTwo, combiner);
-
- assertNotEquals(aggregatorOne, aggregatorTwo);
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInStartBundleThrows() {
- TestPipeline p = createTestPipeline(new DoFn<String, String>() {
- @Override
- public void startBundle(DoFn<String, String>.Context c) throws Exception {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
-
- @Override
- public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {}
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInProcessElementThrows() {
- TestPipeline p = createTestPipeline(new DoFn<String, String>() {
- @Override
- public void processElement(ProcessContext c) throws Exception {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testCreateAggregatorInFinishBundleThrows() {
- TestPipeline p = createTestPipeline(new DoFn<String, String>() {
- @Override
- public void finishBundle(DoFn<String, String>.Context c) throws Exception {
- createAggregator("anyAggregate", new MaxIntegerFn());
- }
-
- @Override
- public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {}
- });
-
- thrown.expect(PipelineExecutionException.class);
- thrown.expectCause(isA(IllegalStateException.class));
-
- p.run();
- }
-
- /**
- * Initialize a test pipeline with the specified {@link DoFn}.
- */
- private <InputT, OutputT> TestPipeline createTestPipeline(DoFn<InputT, OutputT> fn) {
- TestPipeline pipeline = TestPipeline.create();
- pipeline.apply(Create.of((InputT) null))
- .apply(ParDo.of(fn));
-
- return pipeline;
- }
-
- @Test
- public void testPopulateDisplayDataDefaultBehavior() {
- DoFn<String, String> usesDefault =
- new DoFn<String, String>() {
- @Override
- public void processElement(ProcessContext c) throws Exception {}
- };
-
- DisplayData data = DisplayData.from(usesDefault);
- assertThat(data.items(), empty());
- }
-
- @Test
- @Category(NeedsRunner.class)
- public void testAggregators() throws Exception {
- Pipeline pipeline = TestPipeline.create();
-
- CountOddsFn countOdds = new CountOddsFn();
- pipeline
- .apply(Create.of(1, 3, 5, 7, 2, 4, 6, 8, 10, 12, 14, 20, 42, 68, 100))
- .apply(ParDo.of(countOdds));
- PipelineResult result = pipeline.run();
-
- AggregatorValues<Integer> values = result.getAggregatorValues(countOdds.aggregator);
- assertThat(values.getValuesAtSteps(),
- equalTo((Map<String, Integer>) ImmutableMap.<String, Integer>of("ParDo(CountOdds)", 4)));
- }
-
- private static class CountOddsFn extends DoFn<Integer, Void> {
- @Override
- public void processElement(ProcessContext c) throws Exception {
- if (c.element() % 2 == 1) {
- aggregator.addValue(1);
- }
- }
-
- Aggregator<Integer, Integer> aggregator =
- createAggregator("odds", new SumIntegerFn());
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
index 8460a7c..e379f11 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java
@@ -235,7 +235,7 @@ public class DoFnTesterTest {
final PCollectionView<Integer> value =
PCollectionViews.singletonView(
TestPipeline.create(), WindowingStrategy.globalDefault(), true, 0, VarIntCoder.of());
- DoFn<Integer, Integer> fn = new SideInputDoFn(value);
+ OldDoFn<Integer, Integer> fn = new SideInputDoFn(value);
DoFnTester<Integer, Integer> tester = DoFnTester.of(fn);
@@ -251,7 +251,7 @@ public class DoFnTesterTest {
final PCollectionView<Integer> value =
PCollectionViews.singletonView(
TestPipeline.create(), WindowingStrategy.globalDefault(), true, 0, VarIntCoder.of());
- DoFn<Integer, Integer> fn = new SideInputDoFn(value);
+ OldDoFn<Integer, Integer> fn = new SideInputDoFn(value);
DoFnTester<Integer, Integer> tester = DoFnTester.of(fn);
tester.setSideInput(value, GlobalWindow.INSTANCE, -2);
@@ -264,7 +264,7 @@ public class DoFnTesterTest {
assertThat(tester.peekOutputElements(), containsInAnyOrder(-2, -2, -2, -2));
}
- private static class SideInputDoFn extends DoFn<Integer, Integer> {
+ private static class SideInputDoFn extends OldDoFn<Integer, Integer> {
private final PCollectionView<Integer> value;
private SideInputDoFn(PCollectionView<Integer> value) {
@@ -278,9 +278,9 @@ public class DoFnTesterTest {
}
/**
- * A DoFn that adds values to an aggregator and converts input to String in processElement.
+ * A OldDoFn that adds values to an aggregator and converts input to String in processElement.
*/
- private static class CounterDoFn extends DoFn<Long, String> {
+ private static class CounterDoFn extends OldDoFn<Long, String> {
Aggregator<Long, Long> agg = createAggregator("ctr", new Sum.SumLongFn());
private final long startBundleVal;
private final long finishBundleVal;
[13/19] incubator-beam git commit: Port microbenchmarks to new
vocabulary
Posted by dh...@apache.org.
Port microbenchmarks to new vocabulary
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/e07c3397
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/e07c3397
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/e07c3397
Branch: refs/heads/master
Commit: e07c3397d268f50cc879362227a6887cc52f4a3b
Parents: 3236eec
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:51 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../transforms/DoFnReflectorBenchmark.java | 27 ++++++++++----------
1 file changed, 14 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/e07c3397/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
----------------------------------------------------------------------
diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
index fd75e95..233b8be 100644
--- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
+++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
@@ -53,9 +53,10 @@ public class DoFnReflectorBenchmark {
private OldDoFn<String, String> oldDoFn = new UpperCaseOldDoFn();
private DoFn<String, String> doFn = new UpperCaseDoFn();
- private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(oldDoFn, ELEMENT);
- private StubDoFnWithContextProcessContext stubDoFnWithContextContext =
- new StubDoFnWithContextProcessContext(doFn, ELEMENT);
+ private StubOldDoFnProcessContext stubOldDoFnContext = new StubOldDoFnProcessContext(oldDoFn,
+ ELEMENT);
+ private StubDoFnProcessContext stubDoFnContext =
+ new StubDoFnProcessContext(doFn, ELEMENT);
private ExtraContextFactory<String, String> extraContextFactory =
new ExtraContextFactory<String, String>() {
@@ -83,21 +84,21 @@ public class DoFnReflectorBenchmark {
}
@Benchmark
- public String invokeDoFn() throws Exception {
- oldDoFn.processElement(stubDoFnContext);
+ public String invokeOldDoFn() throws Exception {
+ oldDoFn.processElement(stubOldDoFnContext);
return stubDoFnContext.output;
}
@Benchmark
public String invokeDoFnWithContextViaAdaptor() throws Exception {
- adaptedDoFnWithContext.processElement(stubDoFnContext);
- return stubDoFnContext.output;
+ adaptedDoFnWithContext.processElement(stubOldDoFnContext);
+ return stubOldDoFnContext.output;
}
@Benchmark
public String invokeDoFnWithContext() throws Exception {
- invoker.invokeProcessElement(stubDoFnWithContextContext, extraContextFactory);
- return stubDoFnWithContextContext.output;
+ invoker.invokeProcessElement(stubDoFnContext, extraContextFactory);
+ return stubDoFnContext.output;
}
private static class UpperCaseOldDoFn extends OldDoFn<String, String> {
@@ -116,12 +117,12 @@ public class DoFnReflectorBenchmark {
}
}
- private static class StubDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
+ private static class StubOldDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
private final String element;
private String output;
- public StubDoFnProcessContext(OldDoFn<String, String> fn, String element) {
+ public StubOldDoFnProcessContext(OldDoFn<String, String> fn, String element) {
fn.super();
this.element = element;
}
@@ -186,12 +187,12 @@ public class DoFnReflectorBenchmark {
}
}
- private static class StubDoFnWithContextProcessContext
+ private static class StubDoFnProcessContext
extends DoFn<String, String>.ProcessContext {
private final String element;
private String output;
- public StubDoFnWithContextProcessContext(DoFn<String, String> fn, String element) {
+ public StubDoFnProcessContext(DoFn<String, String> fn, String element) {
fn.super();
this.element = element;
}
[08/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
index 8a83e44..b27163a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnWithContext.java
@@ -24,7 +24,7 @@ import static com.google.common.base.Preconditions.checkState;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn.DelegatingAggregator;
+import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.HasDisplayData;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -127,7 +127,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
*
* <p>If invoked from {@link ProcessElement}), the timestamp
* must not be older than the input element's timestamp minus
- * {@link DoFn#getAllowedTimestampSkew}. The output element will
+ * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
* be in the same windows as the input element.
*
* <p>If invoked from {@link StartBundle} or {@link FinishBundle},
@@ -176,7 +176,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
*
* <p>If invoked from {@link ProcessElement}), the timestamp
* must not be older than the input element's timestamp minus
- * {@link DoFn#getAllowedTimestampSkew}. The output element will
+ * {@link OldDoFn#getAllowedTimestampSkew}. The output element will
* be in the same windows as the input element.
*
* <p>If invoked from {@link StartBundle} or {@link FinishBundle},
@@ -194,7 +194,7 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
}
/**
- * Information accessible when running {@link DoFn#processElement}.
+ * Information accessible when running {@link OldDoFn#processElement}.
*/
public abstract class ProcessContext extends Context {
@@ -358,13 +358,13 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
/**
* Returns an {@link Aggregator} with aggregation logic specified by the
* {@link CombineFn} argument. The name provided must be unique across
- * {@link Aggregator}s created within the DoFn. Aggregators can only be created
+ * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
* during pipeline construction.
*
* @param name the name of the aggregator
* @param combiner the {@link CombineFn} to use in the aggregator
* @return an aggregator for the provided name and combiner in the scope of
- * this DoFn
+ * this OldDoFn
* @throws NullPointerException if the name or combiner is null
* @throws IllegalArgumentException if the given name collides with another
* aggregator in this scope
@@ -391,13 +391,13 @@ public abstract class DoFnWithContext<InputT, OutputT> implements Serializable,
/**
* Returns an {@link Aggregator} with the aggregation logic specified by the
* {@link SerializableFunction} argument. The name provided must be unique
- * across {@link Aggregator}s created within the DoFn. Aggregators can only be
+ * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
* created during pipeline construction.
*
* @param name the name of the aggregator
* @param combiner the {@link SerializableFunction} to use in the aggregator
* @return an aggregator for the provided name and combiner in the scope of
- * this DoFn
+ * this OldDoFn
* @throws NullPointerException if the name or combiner is null
* @throws IllegalArgumentException if the given name collides with another
* aggregator in this scope
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
index a31799e..4466874 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Filter.java
@@ -202,7 +202,7 @@ public class Filter<T> extends PTransform<PCollection<T>, PCollection<T>> {
@Override
public PCollection<T> apply(PCollection<T> input) {
- PCollection<T> output = input.apply(ParDo.of(new DoFn<T, T>() {
+ PCollection<T> output = input.apply(ParDo.of(new OldDoFn<T, T>() {
@Override
public void processElement(ProcessContext c) {
if (predicate.apply(c.element()) == true) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
index 4f270a7..b48da38 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
@@ -133,7 +133,7 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
@Override
public PCollection<OutputT> apply(PCollection<InputT> input) {
- return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
+ return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
private static final long serialVersionUID = 0L;
@Override
public void processElement(ProcessContext c) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
index 0b83fb6..53e898e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
@@ -174,7 +174,7 @@ public class Flatten {
Coder<T> elemCoder = ((IterableLikeCoder<T, ?>) inCoder).getElemCoder();
return in.apply("FlattenIterables", ParDo.of(
- new DoFn<Iterable<T>, T>() {
+ new OldDoFn<Iterable<T>, T>() {
@Override
public void processElement(ProcessContext c) {
for (T i : c.element()) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
index 8ad57d2..ed7f411 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/GroupByKey.java
@@ -68,7 +68,7 @@ import org.apache.beam.sdk.values.PCollection.IsBounded;
* PCollection<KV<String, Iterable<Doc>>> urlToDocs =
* urlDocPairs.apply(GroupByKey.<String, Doc>create());
* PCollection<R> results =
- * urlToDocs.apply(ParDo.of(new DoFn<KV<String, Iterable<Doc>>, R>() {
+ * urlToDocs.apply(ParDo.of(new OldDoFn<KV<String, Iterable<Doc>>, R>() {
* public void processElement(ProcessContext c) {
* String url = c.element().getKey();
* Iterable<Doc> docsWithThatUrl = c.element().getValue();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
index ef1e3c6..b5fe60f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java
@@ -40,7 +40,7 @@ import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference;
/**
- * Provides multi-threading of {@link DoFn}s, using threaded execution to
+ * Provides multi-threading of {@link OldDoFn}s, using threaded execution to
* process multiple elements concurrently within a bundle.
*
* <p>Note, that each Dataflow worker will already process multiple bundles
@@ -57,7 +57,7 @@ import java.util.concurrent.atomic.AtomicReference;
* share of the maximum write rate) will take at least 6 seconds to complete (there is additional
* overhead in the extra parallelization).
*
- * <p>To parallelize a {@link DoFn} to 10 threads:
+ * <p>To parallelize a {@link OldDoFn} to 10 threads:
* <pre>{@code
* PCollection<T> data = ...;
* data.apply(
@@ -65,18 +65,18 @@ import java.util.concurrent.atomic.AtomicReference;
* .withMaxParallelism(10)));
* }</pre>
*
- * <p>An uncaught exception from the wrapped {@link DoFn} will result in the exception
+ * <p>An uncaught exception from the wrapped {@link OldDoFn} will result in the exception
* being rethrown in later calls to {@link MultiThreadedIntraBundleProcessingDoFn#processElement}
* or a call to {@link MultiThreadedIntraBundleProcessingDoFn#finishBundle}.
*/
public class IntraBundleParallelization {
/**
* Creates a {@link IntraBundleParallelization} {@link PTransform} for the given
- * {@link DoFn} that processes elements using multiple threads.
+ * {@link OldDoFn} that processes elements using multiple threads.
*
* <p>Note that the specified {@code doFn} needs to be thread safe.
*/
- public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> doFn) {
+ public static <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> doFn) {
return new Unbound().of(doFn);
}
@@ -92,7 +92,7 @@ public class IntraBundleParallelization {
* An incomplete {@code IntraBundleParallelization} transform, with unbound input/output types.
*
* <p>Before being applied, {@link IntraBundleParallelization.Unbound#of} must be
- * invoked to specify the {@link DoFn} to invoke, which will also
+ * invoked to specify the {@link OldDoFn} to invoke, which will also
* bind the input/output types of this {@code PTransform}.
*/
public static class Unbound {
@@ -118,18 +118,18 @@ public class IntraBundleParallelization {
/**
* Returns a new {@link IntraBundleParallelization} {@link PTransform} like this one
- * with the specified {@link DoFn}.
+ * with the specified {@link OldDoFn}.
*
* <p>Note that the specified {@code doFn} needs to be thread safe.
*/
- public <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> doFn) {
+ public <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> doFn) {
return new Bound<>(doFn, maxParallelism);
}
}
/**
* A {@code PTransform} that, when applied to a {@code PCollection<InputT>},
- * invokes a user-specified {@code DoFn<InputT, OutputT>} on all its elements,
+ * invokes a user-specified {@code OldDoFn<InputT, OutputT>} on all its elements,
* with all its outputs collected into an output
* {@code PCollection<OutputT>}.
*
@@ -140,10 +140,10 @@ public class IntraBundleParallelization {
*/
public static class Bound<InputT, OutputT>
extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private final int maxParallelism;
- Bound(DoFn<InputT, OutputT> doFn, int maxParallelism) {
+ Bound(OldDoFn<InputT, OutputT> doFn, int maxParallelism) {
checkArgument(maxParallelism > 0,
"Expected parallelism factor greater than zero, received %s.", maxParallelism);
this.doFn = doFn;
@@ -160,12 +160,12 @@ public class IntraBundleParallelization {
/**
* Returns a new {@link IntraBundleParallelization} {@link PTransform} like this one
- * with the specified {@link DoFn}.
+ * with the specified {@link OldDoFn}.
*
* <p>Note that the specified {@code doFn} needs to be thread safe.
*/
public <NewInputT, NewOutputT> Bound<NewInputT, NewOutputT>
- of(DoFn<NewInputT, NewOutputT> doFn) {
+ of(OldDoFn<NewInputT, NewOutputT> doFn) {
return new Bound<>(doFn, maxParallelism);
}
@@ -188,17 +188,19 @@ public class IntraBundleParallelization {
}
/**
- * A multi-threaded {@code DoFn} wrapper.
+ * A multi-threaded {@code OldDoFn} wrapper.
*
- * @see IntraBundleParallelization#of(DoFn)
+ * @see IntraBundleParallelization#of(OldDoFn)
*
* @param <InputT> the type of the (main) input elements
* @param <OutputT> the type of the (main) output elements
*/
public static class MultiThreadedIntraBundleProcessingDoFn<InputT, OutputT>
- extends DoFn<InputT, OutputT> {
+ extends OldDoFn<InputT, OutputT> {
- public MultiThreadedIntraBundleProcessingDoFn(DoFn<InputT, OutputT> doFn, int maxParallelism) {
+ public MultiThreadedIntraBundleProcessingDoFn(
+ OldDoFn<InputT, OutputT> doFn,
+ int maxParallelism) {
checkArgument(maxParallelism > 0,
"Expected parallelism factor greater than zero, received %s.", maxParallelism);
this.doFn = doFn;
@@ -267,7 +269,7 @@ public class IntraBundleParallelization {
/////////////////////////////////////////////////////////////////////////////
/**
- * Wraps a DoFn context, forcing single-thread output so that threads don't
+ * Wraps a OldDoFn context, forcing single-thread output so that threads don't
* propagate through to downstream functions.
*/
private class WrappedContext extends ProcessContext {
@@ -347,7 +349,7 @@ public class IntraBundleParallelization {
}
}
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private int maxParallelism;
private transient ExecutorService executor;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
index 636e306..c8cbce8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Keys.java
@@ -58,7 +58,7 @@ public class Keys<K> extends PTransform<PCollection<? extends KV<K, ?>>,
@Override
public PCollection<K> apply(PCollection<? extends KV<K, ?>> in) {
return
- in.apply("Keys", ParDo.of(new DoFn<KV<K, ?>, K>() {
+ in.apply("Keys", ParDo.of(new OldDoFn<KV<K, ?>, K>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
index 9597c92..430d37b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/KvSwap.java
@@ -62,7 +62,7 @@ public class KvSwap<K, V> extends PTransform<PCollection<KV<K, V>>,
@Override
public PCollection<KV<V, K>> apply(PCollection<KV<K, V>> in) {
return
- in.apply("KvSwap", ParDo.of(new DoFn<KV<K, V>, KV<V, K>>() {
+ in.apply("KvSwap", ParDo.of(new OldDoFn<KV<K, V>, KV<V, K>>() {
@Override
public void processElement(ProcessContext c) {
KV<K, V> e = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
index f535111..c83c39f 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
@@ -104,7 +104,7 @@ extends PTransform<PCollection<InputT>, PCollection<OutputT>> {
@Override
public PCollection<OutputT> apply(PCollection<InputT> input) {
- return input.apply("Map", ParDo.of(new DoFn<InputT, OutputT>() {
+ return input.apply("Map", ParDo.of(new OldDoFn<InputT, OutputT>() {
@Override
public void processElement(ProcessContext c) {
c.output(fn.apply(c.element()));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
new file mode 100644
index 0000000..48c6033
--- /dev/null
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java
@@ -0,0 +1,565 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.annotations.Experimental.Kind;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.transforms.display.HasDisplayData;
+import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.util.WindowingInternals;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TypeDescriptor;
+
+import com.google.common.base.MoreObjects;
+
+import org.joda.time.Duration;
+import org.joda.time.Instant;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+
+/**
+ * The argument to {@link ParDo} providing the code to use to process
+ * elements of the input
+ * {@link org.apache.beam.sdk.values.PCollection}.
+ *
+ * <p>See {@link ParDo} for more explanation, examples of use, and
+ * discussion of constraints on {@code OldDoFn}s, including their
+ * serializability, lack of access to global shared mutable state,
+ * requirements for failure tolerance, and benefits of optimization.
+ *
+ * <p>{@code OldDoFn}s can be tested in the context of a particular
+ * {@code Pipeline} by running that {@code Pipeline} on sample input
+ * and then checking its output. Unit testing of a {@code OldDoFn},
+ * separately from any {@code ParDo} transform or {@code Pipeline},
+ * can be done via the {@link DoFnTester} harness.
+ *
+ * <p>{@link DoFnWithContext} (currently experimental) offers an alternative
+ * mechanism for accessing {@link ProcessContext#window()} without the need
+ * to implement {@link RequiresWindowAccess}.
+ *
+ * <p>See also {@link #processElement} for details on implementing the transformation
+ * from {@code InputT} to {@code OutputT}.
+ *
+ * @param <InputT> the type of the (main) input elements
+ * @param <OutputT> the type of the (main) output elements
+ */
+public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDisplayData {
+
+ /**
+ * Information accessible to all methods in this {@code OldDoFn}.
+ * Used primarily to output elements.
+ */
+ public abstract class Context {
+
+ /**
+ * Returns the {@code PipelineOptions} specified with the
+ * {@link org.apache.beam.sdk.runners.PipelineRunner}
+ * invoking this {@code OldDoFn}. The {@code PipelineOptions} will
+ * be the default running via {@link DoFnTester}.
+ */
+ public abstract PipelineOptions getPipelineOptions();
+
+ /**
+ * Adds the given element to the main output {@code PCollection}.
+ *
+ * <p>Once passed to {@code output} the element should be considered
+ * immutable and not be modified in any way. It may be cached or retained
+ * by the Dataflow runtime or later steps in the pipeline, or used in
+ * other unspecified ways.
+ *
+ * <p>If invoked from {@link OldDoFn#processElement processElement}, the output
+ * element will have the same timestamp and be in the same windows
+ * as the input element passed to {@link OldDoFn#processElement processElement}.
+ *
+ * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element. The output element
+ * will have a timestamp of negative infinity.
+ */
+ public abstract void output(OutputT output);
+
+ /**
+ * Adds the given element to the main output {@code PCollection},
+ * with the given timestamp.
+ *
+ * <p>Once passed to {@code outputWithTimestamp} the element should not be
+ * modified in any way.
+ *
+ * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp
+ * must not be older than the input element's timestamp minus
+ * {@link OldDoFn#getAllowedTimestampSkew getAllowedTimestampSkew}. The output element will
+ * be in the same windows as the input element.
+ *
+ * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element except for the
+ * timestamp.
+ */
+ public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
+
+ /**
+ * Adds the given element to the side output {@code PCollection} with the
+ * given tag.
+ *
+ * <p>Once passed to {@code sideOutput} the element should not be modified
+ * in any way.
+ *
+ * <p>The caller of {@code ParDo} uses {@link ParDo#withOutputTags withOutputTags} to
+ * specify the tags of side outputs that it consumes. Non-consumed side
+ * outputs, e.g., outputs for monitoring purposes only, don't necessarily
+ * need to be specified.
+ *
+ * <p>The output element will have the same timestamp and be in the same
+ * windows as the input element passed to {@link OldDoFn#processElement processElement}.
+ *
+ * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element. The output element
+ * will have a timestamp of negative infinity.
+ *
+ * @see ParDo#withOutputTags
+ */
+ public abstract <T> void sideOutput(TupleTag<T> tag, T output);
+
+ /**
+ * Adds the given element to the specified side output {@code PCollection},
+ * with the given timestamp.
+ *
+ * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
+ * modified in any way.
+ *
+ * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp
+ * must not be older than the input element's timestamp minus
+ * {@link OldDoFn#getAllowedTimestampSkew getAllowedTimestampSkew}. The output element will
+ * be in the same windows as the input element.
+ *
+ * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
+ * this will attempt to use the
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
+ * of the input {@code PCollection} to determine what windows the element
+ * should be in, throwing an exception if the {@code WindowFn} attempts
+ * to access any information about the input element except for the
+ * timestamp.
+ *
+ * @see ParDo#withOutputTags
+ */
+ public abstract <T> void sideOutputWithTimestamp(
+ TupleTag<T> tag, T output, Instant timestamp);
+
+ /**
+ * Creates an {@link Aggregator} in the {@link OldDoFn} context with the
+ * specified name and aggregation logic specified by {@link CombineFn}.
+ *
+ * <p>For internal use only.
+ *
+ * @param name the name of the aggregator
+ * @param combiner the {@link CombineFn} to use in the aggregator
+ * @return an aggregator for the provided name and {@link CombineFn} in this
+ * context
+ */
+ @Experimental(Kind.AGGREGATOR)
+ protected abstract <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+ createAggregatorInternal(String name, CombineFn<AggInputT, ?, AggOutputT> combiner);
+
+ /**
+ * Sets up {@link Aggregator}s created by the {@link OldDoFn} so they are
+ * usable within this context.
+ *
+ * <p>This method should be called by runners before {@link OldDoFn#startBundle}
+ * is executed.
+ */
+ @Experimental(Kind.AGGREGATOR)
+ protected final void setupDelegateAggregators() {
+ for (DelegatingAggregator<?, ?> aggregator : aggregators.values()) {
+ setupDelegateAggregator(aggregator);
+ }
+
+ aggregatorsAreFinal = true;
+ }
+
+ private final <AggInputT, AggOutputT> void setupDelegateAggregator(
+ DelegatingAggregator<AggInputT, AggOutputT> aggregator) {
+
+ Aggregator<AggInputT, AggOutputT> delegate = createAggregatorInternal(
+ aggregator.getName(), aggregator.getCombineFn());
+
+ aggregator.setDelegate(delegate);
+ }
+ }
+
+ /**
+ * Information accessible when running {@link OldDoFn#processElement}.
+ */
+ public abstract class ProcessContext extends Context {
+
+ /**
+ * Returns the input element to be processed.
+ *
+ * <p>The element should be considered immutable. The Dataflow runtime will not mutate the
+ * element, so it is safe to cache, etc. The element should not be mutated by any of the
+ * {@link OldDoFn} methods, because it may be cached elsewhere, retained by the Dataflow
+ * runtime, or used in other unspecified ways.
+ */
+ public abstract InputT element();
+
+ /**
+ * Returns the value of the side input for the window corresponding to the
+ * window of the main input element.
+ *
+ * <p>See
+ * {@link org.apache.beam.sdk.transforms.windowing.WindowFn#getSideInputWindow}
+ * for how this corresponding window is determined.
+ *
+ * @throws IllegalArgumentException if this is not a side input
+ * @see ParDo#withSideInputs
+ */
+ public abstract <T> T sideInput(PCollectionView<T> view);
+
+ /**
+ * Returns the timestamp of the input element.
+ *
+ * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
+ * for more information.
+ */
+ public abstract Instant timestamp();
+
+ /**
+ * Returns the window into which the input element has been assigned.
+ *
+ * <p>See {@link org.apache.beam.sdk.transforms.windowing.Window}
+ * for more information.
+ *
+ * @throws UnsupportedOperationException if this {@link OldDoFn} does
+ * not implement {@link RequiresWindowAccess}.
+ */
+ public abstract BoundedWindow window();
+
+ /**
+ * Returns information about the pane within this window into which the
+ * input element has been assigned.
+ *
+ * <p>Generally all data is in a single, uninteresting pane unless custom
+ * triggering and/or late data has been explicitly requested.
+ * See {@link org.apache.beam.sdk.transforms.windowing.Window}
+ * for more information.
+ */
+ public abstract PaneInfo pane();
+
+ /**
+ * Returns the process context to use for implementing windowing.
+ */
+ @Experimental
+ public abstract WindowingInternals<InputT, OutputT> windowingInternals();
+ }
+
+ /**
+ * Returns the allowed timestamp skew duration, which is the maximum
+ * duration that timestamps can be shifted backward in
+ * {@link OldDoFn.Context#outputWithTimestamp}.
+ *
+ * <p>The default value is {@code Duration.ZERO}, in which case
+ * timestamps can only be shifted forward to the future. For infinite
+ * skew, return {@code Duration.millis(Long.MAX_VALUE)}.
+ *
+ * <p> Note that producing an element whose timestamp is less than the
+ * current timestamp may result in late data, i.e. returning a non-zero
+ * value here does not impact watermark calculations used for firing
+ * windows.
+ *
+ * @deprecated does not interact well with the watermark.
+ */
+ @Deprecated
+ public Duration getAllowedTimestampSkew() {
+ return Duration.ZERO;
+ }
+
+ /**
+ * Interface for signaling that a {@link OldDoFn} needs to access the window the
+ * element is being processed in, via {@link OldDoFn.ProcessContext#window}.
+ */
+ @Experimental
+ public interface RequiresWindowAccess {}
+
+ public OldDoFn() {
+ this(new HashMap<String, DelegatingAggregator<?, ?>>());
+ }
+
+ OldDoFn(Map<String, DelegatingAggregator<?, ?>> aggregators) {
+ this.aggregators = aggregators;
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+
+ private final Map<String, DelegatingAggregator<?, ?>> aggregators;
+
+ /**
+ * Protects aggregators from being created after initialization.
+ */
+ private boolean aggregatorsAreFinal;
+
+ /**
+ * Prepares this {@code OldDoFn} instance for processing a batch of elements.
+ *
+ * <p>By default, does nothing.
+ */
+ public void startBundle(Context c) throws Exception {
+ }
+
+ /**
+ * Processes one input element.
+ *
+ * <p>The current element of the input {@code PCollection} is returned by
+ * {@link ProcessContext#element() c.element()}. It should be considered immutable. The Dataflow
+ * runtime will not mutate the element, so it is safe to cache, etc. The element should not be
+ * mutated by any of the {@link OldDoFn} methods, because it may be cached elsewhere, retained by
+ * the Dataflow runtime, or used in other unspecified ways.
+ *
+ * <p>A value is added to the main output {@code PCollection} by {@link ProcessContext#output}.
+ * Once passed to {@code output} the element should be considered immutable and not be modified in
+ * any way. It may be cached elsewhere, retained by the Dataflow runtime, or used in other
+ * unspecified ways.
+ *
+ * @see ProcessContext
+ */
+ public abstract void processElement(ProcessContext c) throws Exception;
+
+ /**
+ * Finishes processing this batch of elements.
+ *
+ * <p>By default, does nothing.
+ */
+ public void finishBundle(Context c) throws Exception {
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * <p>By default, does not register any display data. Implementors may override this method
+ * to provide their own display data.
+ */
+ @Override
+ public void populateDisplayData(DisplayData.Builder builder) {
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+
+ /**
+ * Returns a {@link TypeDescriptor} capturing what is known statically
+ * about the input type of this {@code OldDoFn} instance's most-derived
+ * class.
+ *
+ * <p>See {@link #getOutputTypeDescriptor} for more discussion.
+ */
+ protected TypeDescriptor<InputT> getInputTypeDescriptor() {
+ return new TypeDescriptor<InputT>(getClass()) {};
+ }
+
+ /**
+ * Returns a {@link TypeDescriptor} capturing what is known statically
+ * about the output type of this {@code OldDoFn} instance's
+ * most-derived class.
+ *
+ * <p>In the normal case of a concrete {@code OldDoFn} subclass with
+ * no generic type parameters of its own (including anonymous inner
+ * classes), this will be a complete non-generic type, which is good
+ * for choosing a default output {@code Coder<OutputT>} for the output
+ * {@code PCollection<OutputT>}.
+ */
+ protected TypeDescriptor<OutputT> getOutputTypeDescriptor() {
+ return new TypeDescriptor<OutputT>(getClass()) {};
+ }
+
+ /**
+ * Returns an {@link Aggregator} with aggregation logic specified by the
+ * {@link CombineFn} argument. The name provided must be unique across
+ * {@link Aggregator}s created within the OldDoFn. Aggregators can only be created
+ * during pipeline construction.
+ *
+ * @param name the name of the aggregator
+ * @param combiner the {@link CombineFn} to use in the aggregator
+ * @return an aggregator for the provided name and combiner in the scope of
+ * this OldDoFn
+ * @throws NullPointerException if the name or combiner is null
+ * @throws IllegalArgumentException if the given name collides with another
+ * aggregator in this scope
+ * @throws IllegalStateException if called during pipeline processing.
+ */
+ protected final <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT>
+ createAggregator(String name, CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
+ checkNotNull(name, "name cannot be null");
+ checkNotNull(combiner, "combiner cannot be null");
+ checkArgument(!aggregators.containsKey(name),
+ "Cannot create aggregator with name %s."
+ + " An Aggregator with that name already exists within this scope.",
+ name);
+
+ checkState(!aggregatorsAreFinal, "Cannot create an aggregator during OldDoFn processing."
+ + " Aggregators should be registered during pipeline construction.");
+
+ DelegatingAggregator<AggInputT, AggOutputT> aggregator =
+ new DelegatingAggregator<>(name, combiner);
+ aggregators.put(name, aggregator);
+ return aggregator;
+ }
+
+ /**
+ * Returns an {@link Aggregator} with the aggregation logic specified by the
+ * {@link SerializableFunction} argument. The name provided must be unique
+ * across {@link Aggregator}s created within the OldDoFn. Aggregators can only be
+ * created during pipeline construction.
+ *
+ * @param name the name of the aggregator
+ * @param combiner the {@link SerializableFunction} to use in the aggregator
+ * @return an aggregator for the provided name and combiner in the scope of
+ * this OldDoFn
+ * @throws NullPointerException if the name or combiner is null
+ * @throws IllegalArgumentException if the given name collides with another
+ * aggregator in this scope
+ * @throws IllegalStateException if called during pipeline processing.
+ */
+ protected final <AggInputT> Aggregator<AggInputT, AggInputT> createAggregator(String name,
+ SerializableFunction<Iterable<AggInputT>, AggInputT> combiner) {
+ checkNotNull(combiner, "combiner cannot be null.");
+ return createAggregator(name, Combine.IterableCombineFn.of(combiner));
+ }
+
+ /**
+ * Returns the {@link Aggregator Aggregators} created by this {@code OldDoFn}.
+ */
+ Collection<Aggregator<?, ?>> getAggregators() {
+ return Collections.<Aggregator<?, ?>>unmodifiableCollection(aggregators.values());
+ }
+
+ /**
+ * An {@link Aggregator} that delegates calls to addValue to another
+ * aggregator.
+ *
+ * @param <AggInputT> the type of input element
+ * @param <AggOutputT> the type of output element
+ */
+ static class DelegatingAggregator<AggInputT, AggOutputT> implements
+ Aggregator<AggInputT, AggOutputT>, Serializable {
+ private final UUID id;
+
+ private final String name;
+
+ private final CombineFn<AggInputT, ?, AggOutputT> combineFn;
+
+ private Aggregator<AggInputT, ?> delegate;
+
+ public DelegatingAggregator(String name,
+ CombineFn<? super AggInputT, ?, AggOutputT> combiner) {
+ this.id = UUID.randomUUID();
+ this.name = checkNotNull(name, "name cannot be null");
+ // Safe contravariant cast
+ @SuppressWarnings("unchecked")
+ CombineFn<AggInputT, ?, AggOutputT> specificCombiner =
+ (CombineFn<AggInputT, ?, AggOutputT>) checkNotNull(combiner, "combineFn cannot be null");
+ this.combineFn = specificCombiner;
+ }
+
+ @Override
+ public void addValue(AggInputT value) {
+ if (delegate == null) {
+ throw new IllegalStateException(
+ "addValue cannot be called on Aggregator outside of the execution of a OldDoFn.");
+ } else {
+ delegate.addValue(value);
+ }
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public CombineFn<AggInputT, ?, AggOutputT> getCombineFn() {
+ return combineFn;
+ }
+
+ /**
+ * Sets the current delegate of the Aggregator.
+ *
+ * @param delegate the delegate to set in this aggregator
+ */
+ public void setDelegate(Aggregator<AggInputT, ?> delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public String toString() {
+ return MoreObjects.toStringHelper(getClass())
+ .add("name", name)
+ .add("combineFn", combineFn)
+ .toString();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name, combineFn.getClass());
+ }
+
+ /**
+ * Indicates whether some other object is "equal to" this one.
+ *
+ * <p>{@code DelegatingAggregator} instances are equal if they have the same name, their
+ * CombineFns are the same class, and they have identical IDs.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o == null) {
+ return false;
+ }
+ if (o instanceof DelegatingAggregator) {
+ DelegatingAggregator<?, ?> that = (DelegatingAggregator<?, ?>) o;
+ return Objects.equals(this.id, that.id)
+ && Objects.equals(this.name, that.name)
+ && Objects.equals(this.combineFn.getClass(), that.combineFn.getClass());
+ }
+ return false;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
index fe6e8ad..12ab54d 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java
@@ -147,7 +147,7 @@ import java.io.Serializable;
* implementing {@code Serializable}.
*
* <p>{@code PTransform} is marked {@code Serializable} solely
- * because it is common for an anonymous {@code DoFn},
+ * because it is common for an anonymous {@code OldDoFn},
* instance to be created within an
* {@code apply()} method of a composite {@code PTransform}.
*
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index 16dfcac..36d8101 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -52,13 +52,13 @@ import java.util.List;
* <p>The {@link ParDo} processing style is similar to what happens inside
* the "Mapper" or "Reducer" class of a MapReduce-style algorithm.
*
- * <h2>{@link DoFn DoFns}</h2>
+ * <h2>{@link OldDoFn DoFns}</h2>
*
* <p>The function to use to process each element is specified by a
- * {@link DoFn DoFn<InputT, OutputT>}, primarily via its
- * {@link DoFn#processElement processElement} method. The {@link DoFn} may also
- * override the default implementations of {@link DoFn#startBundle startBundle}
- * and {@link DoFn#finishBundle finishBundle}.
+ * {@link OldDoFn OldDoFn<InputT, OutputT>}, primarily via its
+ * {@link OldDoFn#processElement processElement} method. The {@link OldDoFn} may also
+ * override the default implementations of {@link OldDoFn#startBundle startBundle}
+ * and {@link OldDoFn#finishBundle finishBundle}.
*
* <p>Conceptually, when a {@link ParDo} transform is executed, the
* elements of the input {@link PCollection} are first divided up
@@ -67,26 +67,27 @@ import java.util.List;
* For each bundle of input elements processing proceeds as follows:
*
* <ol>
- * <li>If required, a fresh instance of the argument {@link DoFn} is created
+ * <li>If required, a fresh instance of the argument {@link OldDoFn} is created
* on a worker. This may be through deserialization or other means. A
- * {@link PipelineRunner} may reuse {@link DoFn} instances for multiple bundles.
- * A {@link DoFn} that has terminated abnormally (by throwing an {@link Exception}
+ * {@link PipelineRunner} may reuse {@link OldDoFn} instances for multiple bundles.
+ * A {@link OldDoFn} that has terminated abnormally (by throwing an {@link Exception})
* will never be reused.</li>
- * <li>The {@link DoFn DoFn's} {@link DoFn#startBundle} method is called to
+ * <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#startBundle} method is called to
* initialize it. If this method is not overridden, the call may be optimized
* away.</li>
- * <li>The {@link DoFn DoFn's} {@link DoFn#processElement} method
+ * <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#processElement} method
* is called on each of the input elements in the bundle.</li>
- * <li>The {@link DoFn DoFn's} {@link DoFn#finishBundle} method is called
- * to complete its work. After {@link DoFn#finishBundle} is called, the
- * framework will not again invoke {@link DoFn#processElement} or {@link DoFn#finishBundle}
- * until a new call to {@link DoFn#startBundle} has occurred.
+ * <li>The {@link OldDoFn OldDoFn's} {@link OldDoFn#finishBundle} method is called
+ * to complete its work. After {@link OldDoFn#finishBundle} is called, the
+ * framework will not again invoke {@link OldDoFn#processElement} or
+ * {@link OldDoFn#finishBundle}
+ * until a new call to {@link OldDoFn#startBundle} has occurred.
* If this method is not overridden, this call may be optimized away.</li>
* </ol>
*
- * Each of the calls to any of the {@link DoFn DoFn's} processing
+ * Each of the calls to any of the {@link OldDoFn OldDoFn's} processing
* methods can produce zero or more output elements. All of the
- * of output elements from all of the {@link DoFn} instances
+ * output elements from all of the {@link OldDoFn} instances
* are included in the output {@link PCollection}.
*
* <p>For example:
@@ -94,7 +95,7 @@ import java.util.List;
* <pre> {@code
* PCollection<String> lines = ...;
* PCollection<String> words =
- * lines.apply(ParDo.of(new DoFn<String, String>() {
+ * lines.apply(ParDo.of(new OldDoFn<String, String>() {
* public void processElement(ProcessContext c) {
* String line = c.element();
* for (String word : line.split("[^a-zA-Z']+")) {
@@ -102,7 +103,7 @@ import java.util.List;
* }
* }}));
* PCollection<Integer> wordLengths =
- * words.apply(ParDo.of(new DoFn<String, Integer>() {
+ * words.apply(ParDo.of(new OldDoFn<String, Integer>() {
* public void processElement(ProcessContext c) {
* String word = c.element();
* Integer length = word.length();
@@ -127,9 +128,9 @@ import java.util.List;
*
* <pre> {@code
* PCollection<String> words =
- * lines.apply("ExtractWords", ParDo.of(new DoFn<String, String>() { ... }));
+ * lines.apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() { ... }));
* PCollection<Integer> wordLengths =
- * words.apply("ComputeWordLengths", ParDo.of(new DoFn<String, Integer>() { ... }));
+ * words.apply("ComputeWordLengths", ParDo.of(new OldDoFn<String, Integer>() { ... }));
* } </pre>
*
* <h2>Side Inputs</h2>
@@ -141,7 +142,7 @@ import java.util.List;
* {@link PCollection PCollections} computed by earlier pipeline operations,
* passed in to the {@link ParDo} transform using
* {@link #withSideInputs}, and their contents accessible to each of
- * the {@link DoFn} operations via {@link DoFn.ProcessContext#sideInput sideInput}.
+ * the {@link OldDoFn} operations via {@link OldDoFn.ProcessContext#sideInput sideInput}.
* For example:
*
* <pre> {@code
@@ -151,7 +152,7 @@ import java.util.List;
* maxWordLengthCutOff.apply(View.<Integer>asSingleton());
* PCollection<String> wordsBelowCutOff =
* words.apply(ParDo.withSideInputs(maxWordLengthCutOffView)
- * .of(new DoFn<String, String>() {
+ * .of(new OldDoFn<String, String>() {
* public void processElement(ProcessContext c) {
* String word = c.element();
* int lengthCutOff = c.sideInput(maxWordLengthCutOffView);
@@ -170,11 +171,11 @@ import java.util.List;
* and bundled in a {@link PCollectionTuple}. The {@link TupleTag TupleTags}
* to be used for the output {@link PCollectionTuple} are specified by
* invoking {@link #withOutputTags}. Unconsumed side outputs do not
- * necessarily need to be explicitly specified, even if the {@link DoFn}
- * generates them. Within the {@link DoFn}, an element is added to the
+ * necessarily need to be explicitly specified, even if the {@link OldDoFn}
+ * generates them. Within the {@link OldDoFn}, an element is added to the
* main output {@link PCollection} as normal, using
- * {@link DoFn.Context#output}, while an element is added to a side output
- * {@link PCollection} using {@link DoFn.Context#sideOutput}. For example:
+ * {@link OldDoFn.Context#output}, while an element is added to a side output
+ * {@link PCollection} using {@link OldDoFn.Context#sideOutput}. For example:
*
* <pre> {@code
* PCollection<String> words = ...;
@@ -197,7 +198,7 @@ import java.util.List;
* .withOutputTags(wordsBelowCutOffTag,
* TupleTagList.of(wordLengthsAboveCutOffTag)
* .and(markedWordsTag))
- * .of(new DoFn<String, String>() {
+ * .of(new OldDoFn<String, String>() {
* // Create a tag for the unconsumed side output.
* final TupleTag<String> specialWordsTag =
* new TupleTag<String>(){};
@@ -232,7 +233,7 @@ import java.util.List;
*
* <p>Several properties can be specified for a {@link ParDo}
* {@link PTransform}, including name, side inputs, side output tags,
- * and {@link DoFn} to invoke. Only the {@link DoFn} is required; the
+ * and {@link OldDoFn} to invoke. Only the {@link OldDoFn} is required; the
* name is encouraged but not required, and side inputs and side
* output tags are only specified when they're needed. These
* properties can be specified in any order, as long as they're
@@ -246,23 +247,23 @@ import java.util.List;
* {@link ParDo.Bound} nested classes, each of which offer
* property setter instance methods to enable setting additional
* properties. {@link ParDo.Bound} is used for {@link ParDo}
- * transforms whose {@link DoFn} is specified and whose input and
+ * transforms whose {@link OldDoFn} is specified and whose input and
* output static types have been bound. {@link ParDo.Unbound ParDo.Unbound} is used
* for {@link ParDo} transforms that have not yet had their
- * {@link DoFn} specified. Only {@link ParDo.Bound} instances can be
+ * {@link OldDoFn} specified. Only {@link ParDo.Bound} instances can be
* applied.
*
* <p>Another benefit of this approach is that it reduces the number
* of type parameters that need to be specified manually. In
* particular, the input and output types of the {@link ParDo}
* {@link PTransform} are inferred automatically from the type
- * parameters of the {@link DoFn} argument passed to {@link ParDo#of}.
+ * parameters of the {@link OldDoFn} argument passed to {@link ParDo#of}.
*
* <h2>Output Coders</h2>
*
* <p>By default, the {@link Coder Coder<OutputT>} for the
* elements of the main output {@link PCollection PCollection<OutputT>} is
- * inferred from the concrete type of the {@link DoFn DoFn<InputT, OutputT>}.
+ * inferred from the concrete type of the {@link OldDoFn OldDoFn<InputT, OutputT>}.
*
* <p>By default, the {@link Coder Coder<SideOutputT>} for the elements of
* a side output {@link PCollection PCollection<SideOutputT>} is inferred
@@ -282,74 +283,74 @@ import java.util.List;
* This style of {@code TupleTag} instantiation is used in the example of
* multiple side outputs, above.
*
- * <h2>Serializability of {@link DoFn DoFns}</h2>
+ * <h2>Serializability of {@link OldDoFn OldDoFns}</h2>
*
- * <p>A {@link DoFn} passed to a {@link ParDo} transform must be
- * {@link Serializable}. This allows the {@link DoFn} instance
+ * <p>An {@link OldDoFn} passed to a {@link ParDo} transform must be
+ * {@link Serializable}. This allows the {@link OldDoFn} instance
* created in this "main program" to be sent (in serialized form) to
* remote worker machines and reconstituted for bundles of elements
- * of the input {@link PCollection} being processed. A {@link DoFn}
+ * of the input {@link PCollection} being processed. An {@link OldDoFn}
* can have instance variable state, and non-transient instance
* variable state will be serialized in the main program and then
* deserialized on remote worker machines for some number of bundles
* of elements to process.
*
- * <p>{@link DoFn DoFns} expressed as anonymous inner classes can be
+ * <p>{@link OldDoFn OldDoFns} expressed as anonymous inner classes can be
* convenient, but due to a quirk in Java's rules for serializability,
* non-static inner or nested classes (including anonymous inner
* classes) automatically capture their enclosing class's instance in
* their serialized state. This can lead to including much more than
- * intended in the serialized state of a {@link DoFn}, or even things
+ * intended in the serialized state of a {@link OldDoFn}, or even things
* that aren't {@link Serializable}.
*
* <p>There are two ways to avoid unintended serialized state in a
- * {@link DoFn}:
+ * {@link OldDoFn}:
*
* <ul>
*
- * <li>Define the {@link DoFn} as a named, static class.
+ * <li>Define the {@link OldDoFn} as a named, static class.
*
- * <li>Define the {@link DoFn} as an anonymous inner class inside of
+ * <li>Define the {@link OldDoFn} as an anonymous inner class inside of
* a static method.
*
* </ul>
*
* <p>Both of these approaches ensure that there is no implicit enclosing
- * instance serialized along with the {@link DoFn} instance.
+ * instance serialized along with the {@link OldDoFn} instance.
*
* <p>Prior to Java 8, any local variables of the enclosing
* method referenced from within an anonymous inner class need to be
- * marked as {@code final}. If defining the {@link DoFn} as a named
+ * marked as {@code final}. If defining the {@link OldDoFn} as a named
* static class, such variables would be passed as explicit
* constructor arguments and stored in explicit instance variables.
*
* <p>There are three main ways to initialize the state of a
- * {@link DoFn} instance processing a bundle:
+ * {@link OldDoFn} instance processing a bundle:
*
* <ul>
*
* <li>Define instance variable state (including implicit instance
* variables holding final variables captured by an anonymous inner
- * class), initialized by the {@link DoFn}'s constructor (which is
+ * class), initialized by the {@link OldDoFn}'s constructor (which is
* implicit for an anonymous inner class). This state will be
- * automatically serialized and then deserialized in the {@code DoFn}
+ * automatically serialized and then deserialized in the {@code OldDoFn}
* instances created for bundles. This method is good for state
- * known when the original {@code DoFn} is created in the main
+ * known when the original {@code OldDoFn} is created in the main
* program, if it's not overly large. This is not suitable for any
- * state which must only be used for a single bundle, as {@link DoFn DoFn's}
+ * state which must only be used for a single bundle, as {@link OldDoFn OldDoFns}
* may be used to process multiple bundles.
*
* <li>Compute the state as a singleton {@link PCollection} and pass it
- * in as a side input to the {@link DoFn}. This is good if the state
+ * in as a side input to the {@link OldDoFn}. This is good if the state
* needs to be computed by the pipeline, or if the state is very large
* and so is best read from file(s) rather than sent as part of the
- * {@code DoFn}'s serialized state.
+ * {@code OldDoFn}'s serialized state.
*
- * <li>Initialize the state in each {@link DoFn} instance, in
- * {@link DoFn#startBundle}. This is good if the initialization
+ * <li>Initialize the state in each {@link OldDoFn} instance, in
+ * {@link OldDoFn#startBundle}. This is good if the initialization
* doesn't depend on any information known only by the main program or
* computed by earlier pipeline operations, but is the same for all
- * instances of this {@link DoFn} for all program executions, say
+ * instances of this {@link OldDoFn} for all program executions, say
* setting up empty caches or initializing constant data.
*
* </ul>
@@ -362,13 +363,13 @@ import java.util.List;
* no support in the Google Cloud Dataflow system for communicating
* and synchronizing updates to shared state across worker machines,
* so programs should not access any mutable static variable state in
- * their {@link DoFn}, without understanding that the Java processes
+ * their {@link OldDoFn}, without understanding that the Java processes
* for the main program and workers will each have its own independent
* copy of such state, and there won't be any automatic copying of
* that state across Java processes. All information should be
- * communicated to {@link DoFn} instances via main and side inputs and
+ * communicated to {@link OldDoFn} instances via main and side inputs and
* serialized state, and all output should be communicated from a
- * {@link DoFn} instance via main and side outputs, in the absence of
+ * {@link OldDoFn} instance via main and side outputs, in the absence of
* external communication mechanisms written by user code.
*
* <h2>Fault Tolerance</h2>
@@ -378,23 +379,23 @@ import java.util.List;
* While individual failures are rare, the larger the job, the greater
* the chance that something, somewhere, will fail. The Google Cloud
* Dataflow service strives to mask such failures automatically,
- * principally by retrying failed {@link DoFn} bundle. This means
- * that a {@code DoFn} instance might process a bundle partially, then
+ * principally by retrying failed {@link OldDoFn} bundles. This means
+ * that an {@code OldDoFn} instance might process a bundle partially, then
* crash for some reason, then be rerun (often on a different worker
* machine) on that same bundle and on the same elements as before.
- * Sometimes two or more {@link DoFn} instances will be running on the
+ * Sometimes two or more {@link OldDoFn} instances will be running on the
* same bundle simultaneously, with the system taking the results of
* the first instance to complete successfully. Consequently, the
- * code in a {@link DoFn} needs to be written such that these
+ * code in a {@link OldDoFn} needs to be written such that these
* duplicate (sequential or concurrent) executions do not cause
- * problems. If the outputs of a {@link DoFn} are a pure function of
+ * problems. If the outputs of a {@link OldDoFn} are a pure function of
* its inputs, then this requirement is satisfied. However, if a
- * {@link DoFn DoFn's} execution has external side-effects, such as performing
- * updates to external HTTP services, then the {@link DoFn DoFn's} code
+ * {@link OldDoFn OldDoFn's} execution has external side-effects, such as performing
+ * updates to external HTTP services, then the {@link OldDoFn OldDoFn's} code
* needs to take care to ensure that those updates are idempotent and
* that concurrent updates are acceptable. This property can be
* difficult to achieve, so it is advisable to strive to keep
- * {@link DoFn DoFns} as pure functions as much as possible.
+ * {@link OldDoFn OldDoFns} as pure functions as much as possible.
*
* <h2>Optimization</h2>
*
@@ -439,15 +440,15 @@ public class ParDo {
*
* <p>Side inputs are {@link PCollectionView PCollectionViews}, whose contents are
* computed during pipeline execution and then made accessible to
- * {@link DoFn} code via {@link DoFn.ProcessContext#sideInput sideInput}. Each
- * invocation of the {@link DoFn} receives the same values for these
+ * {@link OldDoFn} code via {@link OldDoFn.ProcessContext#sideInput sideInput}. Each
+ * invocation of the {@link OldDoFn} receives the same values for these
* side inputs.
*
* <p>See the discussion of Side Inputs above for more explanation.
*
* <p>The resulting {@link PTransform} is incomplete, and its
* input/output types are not yet bound. Use
- * {@link ParDo.Unbound#of} to specify the {@link DoFn} to
+ * {@link ParDo.Unbound#of} to specify the {@link OldDoFn} to
* invoke, which will also bind the input/output types of this
* {@link PTransform}.
*/
@@ -460,13 +461,13 @@ public class ParDo {
*
* <p>Side inputs are {@link PCollectionView}s, whose contents are
* computed during pipeline execution and then made accessible to
- * {@code DoFn} code via {@link DoFn.ProcessContext#sideInput sideInput}.
+ * {@code OldDoFn} code via {@link OldDoFn.ProcessContext#sideInput sideInput}.
*
* <p>See the discussion of Side Inputs above for more explanation.
*
* <p>The resulting {@link PTransform} is incomplete, and its
* input/output types are not yet bound. Use
- * {@link ParDo.Unbound#of} to specify the {@link DoFn} to
+ * {@link ParDo.Unbound#of} to specify the {@link OldDoFn} to
* invoke, which will also bind the input/output types of this
* {@link PTransform}.
*/
@@ -482,11 +483,11 @@ public class ParDo {
*
* <p>{@link TupleTag TupleTags} are used to name (with its static element
* type {@code T}) each main and side output {@code PCollection<T>}.
- * This {@link PTransform PTransform's} {@link DoFn} emits elements to the main
+ * This {@link PTransform PTransform's} {@link OldDoFn} emits elements to the main
* output {@link PCollection} as normal, using
- * {@link DoFn.Context#output}. The {@link DoFn} emits elements to
+ * {@link OldDoFn.Context#output}. The {@link OldDoFn} emits elements to
* a side output {@code PCollection} using
- * {@link DoFn.Context#sideOutput}, passing that side output's tag
+ * {@link OldDoFn.Context#sideOutput}, passing that side output's tag
* as an argument. The result of invoking this {@link PTransform}
* will be a {@link PCollectionTuple}, and any of the main and
* side output {@code PCollection}s can be retrieved from it via
@@ -497,7 +498,7 @@ public class ParDo {
*
* <p>The resulting {@link PTransform} is incomplete, and its input
* type is not yet bound. Use {@link ParDo.UnboundMulti#of}
- * to specify the {@link DoFn} to invoke, which will also bind the
+ * to specify the {@link OldDoFn} to invoke, which will also bind the
* input type of this {@link PTransform}.
*/
public static <OutputT> UnboundMulti<OutputT> withOutputTags(
@@ -508,24 +509,24 @@ public class ParDo {
/**
* Creates a {@link ParDo} {@link PTransform} that will invoke the
- * given {@link DoFn} function.
+ * given {@link OldDoFn} function.
*
* <p>The resulting {@link PTransform PTransform's} types have been bound, with the
* input being a {@code PCollection<InputT>} and the output a
* {@code PCollection<OutputT>}, inferred from the types of the argument
- * {@code DoFn<InputT, OutputT>}. It is ready to be applied, or further
+ * {@code OldDoFn<InputT, OutputT>}. It is ready to be applied, or further
* properties can be set on it first.
*/
- public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+ public static <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
return of(fn, fn.getClass());
}
private static <InputT, OutputT> Bound<InputT, OutputT> of(
- DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+ OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
return new Unbound().of(fn, fnClass);
}
- private static <InputT, OutputT> DoFn<InputT, OutputT>
+ private static <InputT, OutputT> OldDoFn<InputT, OutputT>
adapt(DoFnWithContext<InputT, OutputT> fn) {
return DoFnReflector.of(fn.getClass()).toDoFn(fn);
}
@@ -537,11 +538,11 @@ public class ParDo {
* <p>The resulting {@link PTransform PTransform's} types have been bound, with the
* input being a {@code PCollection<InputT>} and the output a
* {@code PCollection<OutputT>}, inferred from the types of the argument
- * {@code DoFn<InputT, OutputT>}. It is ready to be applied, or further
+ * {@code OldDoFn<InputT, OutputT>}. It is ready to be applied, or further
* properties can be set on it first.
*
* <p>{@link DoFnWithContext} is an experimental alternative to
- * {@link DoFn} which simplifies accessing the window of the element.
+ * {@link OldDoFn} which simplifies accessing the window of the element.
*/
@Experimental
public static <InputT, OutputT> Bound<InputT, OutputT> of(DoFnWithContext<InputT, OutputT> fn) {
@@ -552,7 +553,7 @@ public class ParDo {
* An incomplete {@link ParDo} transform, with unbound input/output types.
*
* <p>Before being applied, {@link ParDo.Unbound#of} must be
- * invoked to specify the {@link DoFn} to invoke, which will also
+ * invoked to specify the {@link OldDoFn} to invoke, which will also
* bind the input/output types of this {@link PTransform}.
*/
public static class Unbound {
@@ -614,18 +615,18 @@ public class ParDo {
/**
* Returns a new {@link ParDo} {@link PTransform} that's like this
- * transform but that will invoke the given {@link DoFn}
+ * transform but that will invoke the given {@link OldDoFn}
* function, and that has its input and output types bound. Does
* not modify this transform. The resulting {@link PTransform} is
* sufficiently specified to be applied, but more properties can
* still be specified.
*/
- public <InputT, OutputT> Bound<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+ public <InputT, OutputT> Bound<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
return of(fn, fn.getClass());
}
private <InputT, OutputT> Bound<InputT, OutputT> of(
- DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+ OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
return new Bound<>(name, sideInputs, fn, fnClass);
}
@@ -645,7 +646,7 @@ public class ParDo {
/**
* A {@link PTransform} that, when applied to a {@code PCollection<InputT>},
- * invokes a user-specified {@code DoFn<InputT, OutputT>} on all its elements,
+ * invokes a user-specified {@code OldDoFn<InputT, OutputT>} on all its elements,
* with all its outputs collected into an output
* {@code PCollection<OutputT>}.
*
@@ -659,12 +660,12 @@ public class ParDo {
extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
// Inherits name.
private final List<PCollectionView<?>> sideInputs;
- private final DoFn<InputT, OutputT> fn;
+ private final OldDoFn<InputT, OutputT> fn;
private final Class<?> fnClass;
Bound(String name,
List<PCollectionView<?>> sideInputs,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
Class<?> fnClass) {
super(name);
this.sideInputs = sideInputs;
@@ -746,9 +747,9 @@ public class ParDo {
/**
* {@inheritDoc}
*
- * <p>{@link ParDo} registers its internal {@link DoFn} as a subcomponent for display data.
- * {@link DoFn} implementations can register display data by overriding
- * {@link DoFn#populateDisplayData}.
+ * <p>{@link ParDo} registers its internal {@link OldDoFn} as a subcomponent for display data.
+ * {@link OldDoFn} implementations can register display data by overriding
+ * {@link OldDoFn#populateDisplayData}.
*/
@Override
public void populateDisplayData(Builder builder) {
@@ -756,7 +757,7 @@ public class ParDo {
ParDo.populateDisplayData(builder, fn, fnClass);
}
- public DoFn<InputT, OutputT> getFn() {
+ public OldDoFn<InputT, OutputT> getFn() {
return fn;
}
@@ -770,7 +771,7 @@ public class ParDo {
* input type.
*
* <p>Before being applied, {@link ParDo.UnboundMulti#of} must be
- * invoked to specify the {@link DoFn} to invoke, which will also
+ * invoked to specify the {@link OldDoFn} to invoke, which will also
* bind the input type of this {@link PTransform}.
*
* @param <OutputT> the type of the main output {@code PCollection} elements
@@ -827,16 +828,16 @@ public class ParDo {
/**
* Returns a new multi-output {@link ParDo} {@link PTransform}
* that's like this transform but that will invoke the given
- * {@link DoFn} function, and that has its input type bound.
+ * {@link OldDoFn} function, and that has its input type bound.
* Does not modify this transform. The resulting
* {@link PTransform} is sufficiently specified to be applied, but
* more properties can still be specified.
*/
- public <InputT> BoundMulti<InputT, OutputT> of(DoFn<InputT, OutputT> fn) {
+ public <InputT> BoundMulti<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn) {
return of(fn, fn.getClass());
}
- public <InputT> BoundMulti<InputT, OutputT> of(DoFn<InputT, OutputT> fn, Class<?> fnClass) {
+ public <InputT> BoundMulti<InputT, OutputT> of(OldDoFn<InputT, OutputT> fn, Class<?> fnClass) {
return new BoundMulti<>(
name, sideInputs, mainOutputTag, sideOutputTags, fn, fnClass);
}
@@ -857,7 +858,7 @@ public class ParDo {
/**
* A {@link PTransform} that, when applied to a
* {@code PCollection<InputT>}, invokes a user-specified
- * {@code DoFn<InputT, OutputT>} on all its elements, which can emit elements
+ * {@code OldDoFn<InputT, OutputT>} on all its elements, which can emit elements
* to any of the {@link PTransform}'s main and side output
* {@code PCollection}s, which are bundled into a result
* {@code PCollectionTuple}.
@@ -871,14 +872,14 @@ public class ParDo {
private final List<PCollectionView<?>> sideInputs;
private final TupleTag<OutputT> mainOutputTag;
private final TupleTagList sideOutputTags;
- private final DoFn<InputT, OutputT> fn;
+ private final OldDoFn<InputT, OutputT> fn;
private final Class<?> fnClass;
BoundMulti(String name,
List<PCollectionView<?>> sideInputs,
TupleTag<OutputT> mainOutputTag,
TupleTagList sideOutputTags,
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
Class<?> fnClass) {
super(name);
this.sideInputs = sideInputs;
@@ -929,7 +930,7 @@ public class ParDo {
input.isBounded());
// The fn will likely be an instance of an anonymous subclass
- // such as DoFn<Integer, String> { }, thus will have a high-fidelity
+ // such as OldDoFn<Integer, String> { }, thus will have a high-fidelity
// TypeDescriptor for the output type.
outputs.get(mainOutputTag).setTypeDescriptorInternal(fn.getOutputTypeDescriptor());
@@ -970,7 +971,7 @@ public class ParDo {
ParDo.populateDisplayData(builder, fn, fnClass);
}
- public DoFn<InputT, OutputT> getFn() {
+ public OldDoFn<InputT, OutputT> getFn() {
return fn;
}
@@ -988,7 +989,7 @@ public class ParDo {
}
private static void populateDisplayData(
- DisplayData.Builder builder, DoFn<?, ?> fn, Class<?> fnClass) {
+ DisplayData.Builder builder, OldDoFn<?, ?> fn, Class<?> fnClass) {
builder
.include(fn)
.add(DisplayData.item("fn", fnClass)
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
index 6281b30..2ddcc29 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
@@ -134,7 +134,7 @@ public class Partition<T> extends PTransform<PCollection<T>, PCollectionList<T>>
this.partitionDoFn = partitionDoFn;
}
- private static class PartitionDoFn<X> extends DoFn<X, Void> {
+ private static class PartitionDoFn<X> extends OldDoFn<X, Void> {
private final int numPartitions;
private final PartitionFn<? super X> partitionFn;
private final TupleTagList outputTags;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
index b82744d..d82c457 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/RemoveDuplicates.java
@@ -85,7 +85,7 @@ public class RemoveDuplicates<T> extends PTransform<PCollection<T>,
@Override
public PCollection<T> apply(PCollection<T> in) {
return in
- .apply("CreateIndex", ParDo.of(new DoFn<T, KV<T, Void>>() {
+ .apply("CreateIndex", ParDo.of(new OldDoFn<T, KV<T, Void>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(c.element(), (Void) null));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
index 4fcd17e..724b252 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java
@@ -164,9 +164,9 @@ public class Sample {
}
/**
- * A {@link DoFn} that returns up to limit elements from the side input PCollection.
+ * A {@link OldDoFn} that returns up to limit elements from the side input PCollection.
*/
- private static class SampleAnyDoFn<T> extends DoFn<Void, T> {
+ private static class SampleAnyDoFn<T> extends OldDoFn<Void, T> {
long limit;
final PCollectionView<Iterable<T>> iterableView;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
index a879925..6623c6a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/SimpleFunction.java
@@ -29,7 +29,7 @@ public abstract class SimpleFunction<InputT, OutputT>
/**
* Returns a {@link TypeDescriptor} capturing what is known statically
- * about the input type of this {@code DoFn} instance's most-derived
+ * about the input type of this {@code OldDoFn} instance's most-derived
* class.
*
* <p>See {@link #getOutputTypeDescriptor} for more discussion.
@@ -40,10 +40,10 @@ public abstract class SimpleFunction<InputT, OutputT>
/**
* Returns a {@link TypeDescriptor} capturing what is known statically
- * about the output type of this {@code DoFn} instance's
+ * about the output type of this {@code OldDoFn} instance's
* most-derived class.
*
- * <p>In the normal case of a concrete {@code DoFn} subclass with
+ * <p>In the normal case of a concrete {@code OldDoFn} subclass with
* no generic type parameters of its own (including anonymous inner
* classes), this will be a complete non-generic type, which is good
* for choosing a default output {@code Coder<OutputT>} for the output
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
index 5212261..856e32a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Values.java
@@ -58,7 +58,7 @@ public class Values<V> extends PTransform<PCollection<? extends KV<?, V>>,
@Override
public PCollection<V> apply(PCollection<? extends KV<?, V>> in) {
return
- in.apply("Values", ParDo.of(new DoFn<KV<?, V>, V>() {
+ in.apply("Values", ParDo.of(new OldDoFn<KV<?, V>, V>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getValue());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
index 7a97c13..8a61637 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java
@@ -38,7 +38,7 @@ import java.util.Map;
*
* <p>When a {@link ParDo} transform is processing a main input
* element in a window {@code w} and a {@link PCollectionView} is read via
- * {@link DoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
+ * {@link OldDoFn.ProcessContext#sideInput}, the value of the view for {@code w} is
* returned.
*
* <p>The SDK supports viewing a {@link PCollection}, per window, as a single value,
@@ -118,7 +118,7 @@ import java.util.Map;
*
* PCollection PageVisits = urlVisits
* .apply(ParDo.withSideInputs(urlToPage)
- * .of(new DoFn<UrlVisit, PageVisit>() {
+ * .of(new OldDoFn<UrlVisit, PageVisit>() {
* {@literal @}Override
* void processElement(ProcessContext context) {
* UrlVisit urlVisit = context.element();
@@ -154,11 +154,11 @@ public class View {
*
* <p>If the input {@link PCollection} is empty,
* throws {@link java.util.NoSuchElementException} in the consuming
- * {@link DoFn}.
+ * {@link OldDoFn}.
*
* <p>If the input {@link PCollection} contains more than one
* element, throws {@link IllegalArgumentException} in the
- * consuming {@link DoFn}.
+ * consuming {@link OldDoFn}.
*/
public static <T> AsSingleton<T> asSingleton() {
return new AsSingleton<>();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
index 25116d8..37d45aa 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithKeys.java
@@ -113,7 +113,7 @@ public class WithKeys<K, V> extends PTransform<PCollection<V>,
@Override
public PCollection<KV<K, V>> apply(PCollection<V> in) {
PCollection<KV<K, V>> result =
- in.apply("AddKeys", ParDo.of(new DoFn<V, KV<K, V>>() {
+ in.apply("AddKeys", ParDo.of(new OldDoFn<V, KV<K, V>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(fn.apply(c.element()),
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
index ef4b269..41b549b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java
@@ -92,7 +92,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
* Returns the allowed timestamp skew duration, which is the maximum
* duration that timestamps can be shifted backwards from the timestamp of the input element.
*
- * @see DoFn#getAllowedTimestampSkew()
+ * @see OldDoFn#getAllowedTimestampSkew()
*/
public Duration getAllowedTimestampSkew() {
return allowedTimestampSkew;
@@ -105,7 +105,7 @@ public class WithTimestamps<T> extends PTransform<PCollection<T>, PCollection<T>
.setTypeDescriptorInternal(input.getTypeDescriptor());
}
- private static class AddTimestampsDoFn<T> extends DoFn<T, T> {
+ private static class AddTimestampsDoFn<T> extends OldDoFn<T, T> {
private final SerializableFunction<T, Instant> fn;
private final Duration allowedTimestampSkew;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
index ee7323b..5dcaec8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java
@@ -30,7 +30,6 @@ import com.google.common.collect.Sets;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonValue;
-
import org.apache.avro.reflect.Nullable;
import org.joda.time.Duration;
import org.joda.time.Instant;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
index 5e4cb52..aa26cbb 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java
@@ -37,7 +37,6 @@ import com.google.common.collect.PeekingIterator;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
index ba4a4a7..1bd9f4a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java
@@ -19,9 +19,9 @@ package org.apache.beam.sdk.transforms.join;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.join.CoGbkResult.CoGbkResultCoder;
@@ -57,7 +57,7 @@ import java.util.List;
*
* PCollection<T> finalResultCollection =
* coGbkResultCollection.apply(ParDo.of(
- * new DoFn<KV<K, CoGbkResult>, T>() {
+ * new OldDoFn<KV<K, CoGbkResult>, T>() {
* @Override
* public void processElement(ProcessContext c) {
* KV<K, CoGbkResult> e = c.element();
@@ -167,12 +167,12 @@ public class CoGroupByKey<K> extends
}
/**
- * A DoFn to construct a UnionTable (i.e., a
+ * An OldDoFn to construct a UnionTable (i.e., a
* {@code PCollection<KV<K, RawUnionValue>>} from a
* {@code PCollection<KV<K, V>>}.
*/
private static class ConstructUnionTableFn<K, V> extends
- DoFn<KV<K, V>, KV<K, RawUnionValue>> {
+ OldDoFn<KV<K, V>, KV<K, RawUnionValue>> {
private final int index;
@@ -188,12 +188,12 @@ public class CoGroupByKey<K> extends
}
/**
- * A DoFn to construct a CoGbkResult from an input grouped union
+ * A OldDoFn to construct a CoGbkResult from an input grouped union
* table.
*/
private static class ConstructCoGbkResultFn<K>
- extends DoFn<KV<K, Iterable<RawUnionValue>>,
- KV<K, CoGbkResult>> {
+ extends OldDoFn<KV<K, Iterable<RawUnionValue>>,
+ KV<K, CoGbkResult>> {
private final CoGbkResultSchema schema;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
index bd57339..dc1e74b 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java
@@ -23,6 +23,7 @@ import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.util.ExecutableTrigger;
import com.google.common.base.Joiner;
+
import org.joda.time.Instant;
import java.util.Arrays;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
index 563455b..324ab08 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java
@@ -25,6 +25,7 @@ import org.joda.time.Instant;
import java.util.List;
import java.util.Objects;
+
import javax.annotation.Nullable;
/**
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
index 6f9c717..45898e0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.coders.DurationCoder;
import org.apache.beam.sdk.coders.InstantCoder;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.joda.time.ReadableDuration;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
index 40f3496..7267d00 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms.windowing;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger;
+
import org.joda.time.Instant;
import java.util.List;
[10/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
index 89243a3..a4af1b0 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.PCollectionView;
@@ -30,13 +30,13 @@ import org.apache.flink.util.Collector;
import java.util.Map;
/**
- * Encapsulates a {@link org.apache.beam.sdk.transforms.DoFn}
+ * Encapsulates a {@link OldDoFn}
* inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}.
*/
public class FlinkDoFnFunction<InputT, OutputT>
extends RichMapPartitionFunction<WindowedValue<InputT>, WindowedValue<OutputT>> {
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private final SerializedPipelineOptions serializedOptions;
private final Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs;
@@ -47,7 +47,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
private final WindowingStrategy<?, ?> windowingStrategy;
public FlinkDoFnFunction(
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
WindowingStrategy<?, ?> windowingStrategy,
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
PipelineOptions options) {
@@ -56,7 +56,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
this.serializedOptions = new SerializedPipelineOptions(options);
this.windowingStrategy = windowingStrategy;
- this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+ this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
this.hasSideInputs = !sideInputs.isEmpty();
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
index 9074d72..2d36043 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
@@ -60,7 +60,7 @@ public class FlinkMergingNonShuffleReduceFunction<
private final CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, OutputT> combineFn;
- private final DoFn<KV<K, InputT>, KV<K, OutputT>> doFn;
+ private final OldDoFn<KV<K, InputT>, KV<K, OutputT>> doFn;
private final WindowingStrategy<?, W> windowingStrategy;
@@ -81,8 +81,8 @@ public class FlinkMergingNonShuffleReduceFunction<
this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
- // dummy DoFn because we need one for ProcessContext
- this.doFn = new DoFn<KV<K, InputT>, KV<K, OutputT>>() {
+ // dummy OldDoFn because we need one for ProcessContext
+ this.doFn = new OldDoFn<KV<K, InputT>, KV<K, OutputT>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
index f92e76f..6e673fc 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
@@ -32,7 +32,7 @@ import org.apache.flink.util.Collector;
import java.util.Map;
/**
- * Encapsulates a {@link org.apache.beam.sdk.transforms.DoFn} that uses side outputs
+ * Encapsulates a {@link OldDoFn} that uses side outputs
* inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}.
*
* We get a mapping from {@link org.apache.beam.sdk.values.TupleTag} to output index
@@ -42,7 +42,7 @@ import java.util.Map;
public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
extends RichMapPartitionFunction<WindowedValue<InputT>, WindowedValue<RawUnionValue>> {
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private final SerializedPipelineOptions serializedOptions;
private final Map<TupleTag<?>, Integer> outputMap;
@@ -55,7 +55,7 @@ public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
private final WindowingStrategy<?, ?> windowingStrategy;
public FlinkMultiOutputDoFnFunction(
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
WindowingStrategy<?, ?> windowingStrategy,
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs,
PipelineOptions options,
@@ -64,7 +64,7 @@ public class FlinkMultiOutputDoFnFunction<InputT, OutputT>
this.serializedOptions = new SerializedPipelineOptions(options);
this.outputMap = outputMap;
- this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+ this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
this.hasSideInputs = !sideInputs.isEmpty();
this.windowingStrategy = windowingStrategy;
this.sideInputs = sideInputs;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
index 71b6d27..fab3c85 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java
@@ -18,7 +18,7 @@
package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -35,7 +35,7 @@ import java.util.Collection;
import java.util.Map;
/**
- * {@link DoFn.ProcessContext} for {@link FlinkMultiOutputDoFnFunction} that supports
+ * {@link OldDoFn.ProcessContext} for {@link FlinkMultiOutputDoFnFunction} that supports
* side outputs.
*/
class FlinkMultiOutputProcessContext<InputT, OutputT>
@@ -50,7 +50,7 @@ class FlinkMultiOutputProcessContext<InputT, OutputT>
FlinkMultiOutputProcessContext(
PipelineOptions pipelineOptions,
RuntimeContext runtimeContext,
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
WindowingStrategy<?, ?> windowingStrategy,
Collector<WindowedValue<RawUnionValue>> collector,
Map<TupleTag<?>, Integer> outputMap,
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
index d49821b..98446f9 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java
@@ -17,18 +17,16 @@
*/
package org.apache.beam.runners.flink.translation.functions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
import org.joda.time.Instant;
-import java.util.Collection;
-
/**
* {@link WindowFn.AssignContext} for calling a {@link WindowFn} for elements emitted from
- * {@link org.apache.beam.sdk.transforms.DoFn#startBundle(DoFn.Context)}
- * or {@link DoFn#finishBundle(DoFn.Context)}.
+ * {@link OldDoFn#startBundle(OldDoFn.Context)}
+ * or {@link OldDoFn#finishBundle(OldDoFn.Context)}.
*
* <p>In those cases the {@code WindowFn} is not allowed to access any element information.
*/
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
index c29e1df..2db4b7b 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -58,7 +58,7 @@ public class FlinkPartialReduceFunction<K, InputT, AccumT, W extends BoundedWind
protected final CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, ?> combineFn;
- protected final DoFn<KV<K, InputT>, KV<K, AccumT>> doFn;
+ protected final OldDoFn<KV<K, InputT>, KV<K, AccumT>> doFn;
protected final WindowingStrategy<?, W> windowingStrategy;
@@ -77,8 +77,8 @@ public class FlinkPartialReduceFunction<K, InputT, AccumT, W extends BoundedWind
this.sideInputs = sideInputs;
this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
- // dummy DoFn because we need one for ProcessContext
- this.doFn = new DoFn<KV<K, InputT>, KV<K, AccumT>>() {
+ // dummy OldDoFn because we need one for ProcessContext
+ this.doFn = new OldDoFn<KV<K, InputT>, KV<K, AccumT>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
index 235a803..3954d1f 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.TimerInternals;
@@ -48,10 +48,10 @@ import java.util.Iterator;
import java.util.Map;
/**
- * {@link org.apache.beam.sdk.transforms.DoFn.ProcessContext} for our Flink Wrappers.
+ * {@link OldDoFn.ProcessContext} for our Flink Wrappers.
*/
class FlinkProcessContext<InputT, OutputT>
- extends DoFn<InputT, OutputT>.ProcessContext {
+ extends OldDoFn<InputT, OutputT>.ProcessContext {
private final PipelineOptions pipelineOptions;
private final RuntimeContext runtimeContext;
@@ -67,7 +67,7 @@ class FlinkProcessContext<InputT, OutputT>
FlinkProcessContext(
PipelineOptions pipelineOptions,
RuntimeContext runtimeContext,
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
WindowingStrategy<?, ?> windowingStrategy,
Collector<WindowedValue<OutputT>> collector,
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs) {
@@ -80,7 +80,7 @@ class FlinkProcessContext<InputT, OutputT>
this.pipelineOptions = pipelineOptions;
this.runtimeContext = runtimeContext;
this.collector = collector;
- this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+ this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
this.windowingStrategy = windowingStrategy;
this.sideInputs = sideInputs;
@@ -90,7 +90,7 @@ class FlinkProcessContext<InputT, OutputT>
FlinkProcessContext(
PipelineOptions pipelineOptions,
RuntimeContext runtimeContext,
- DoFn<InputT, OutputT> doFn,
+ OldDoFn<InputT, OutputT> doFn,
WindowingStrategy<?, ?> windowingStrategy,
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputs) {
doFn.super();
@@ -101,7 +101,7 @@ class FlinkProcessContext<InputT, OutputT>
this.pipelineOptions = pipelineOptions;
this.runtimeContext = runtimeContext;
this.collector = null;
- this.requiresWindowAccess = doFn instanceof DoFn.RequiresWindowAccess;
+ this.requiresWindowAccess = doFn instanceof OldDoFn.RequiresWindowAccess;
this.windowingStrategy = windowingStrategy;
this.sideInputs = sideInputs;
@@ -141,7 +141,7 @@ class FlinkProcessContext<InputT, OutputT>
public BoundedWindow window() {
if (!requiresWindowAccess) {
throw new UnsupportedOperationException(
- "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+ "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
}
return Iterables.getOnlyElement(windowedValue.getWindows());
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
index 9cbc6b9..b1729a4 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.functions;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.CombineFnBase;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -60,7 +60,7 @@ public class FlinkReduceFunction<K, AccumT, OutputT, W extends BoundedWindow>
protected final CombineFnBase.PerKeyCombineFn<K, ?, AccumT, OutputT> combineFn;
- protected final DoFn<KV<K, AccumT>, KV<K, OutputT>> doFn;
+ protected final OldDoFn<KV<K, AccumT>, KV<K, OutputT>> doFn;
protected final WindowingStrategy<?, W> windowingStrategy;
@@ -81,8 +81,8 @@ public class FlinkReduceFunction<K, AccumT, OutputT, W extends BoundedWindow>
this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
- // dummy DoFn because we need one for ProcessContext
- this.doFn = new DoFn<KV<K, AccumT>, KV<K, OutputT>>() {
+ // dummy OldDoFn because we need one for ProcessContext
+ this.doFn = new OldDoFn<KV<K, AccumT>, KV<K, OutputT>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
index e40d6e3..74ec66a 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java
@@ -24,7 +24,7 @@ import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregat
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
@@ -52,13 +52,13 @@ import java.util.Collection;
* */
public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFlatMapFunction<WindowedValue<IN>, WindowedValue<OUTFL>> {
- private final DoFn<IN, OUTDF> doFn;
+ private final OldDoFn<IN, OUTDF> doFn;
private final WindowingStrategy<?, ?> windowingStrategy;
private final SerializedPipelineOptions serializedPipelineOptions;
private DoFnProcessContext context;
- public FlinkAbstractParDoWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUTDF> doFn) {
+ public FlinkAbstractParDoWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUTDF> doFn) {
checkNotNull(options);
checkNotNull(windowingStrategy);
checkNotNull(doFn);
@@ -104,15 +104,15 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
doFn.processElement(this.context);
}
- private class DoFnProcessContext extends DoFn<IN, OUTDF>.ProcessContext {
+ private class DoFnProcessContext extends OldDoFn<IN, OUTDF>.ProcessContext {
- private final DoFn<IN, OUTDF> fn;
+ private final OldDoFn<IN, OUTDF> fn;
protected final Collector<WindowedValue<OUTFL>> collector;
private WindowedValue<IN> element;
- private DoFnProcessContext(DoFn<IN, OUTDF> function,
+ private DoFnProcessContext(OldDoFn<IN, OUTDF> function,
Collector<WindowedValue<OUTFL>> outCollector) {
function.super();
super.setupDelegateAggregators();
@@ -137,9 +137,9 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
@Override
public BoundedWindow window() {
- if (!(fn instanceof DoFn.RequiresWindowAccess)) {
+ if (!(fn instanceof OldDoFn.RequiresWindowAccess)) {
throw new UnsupportedOperationException(
- "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+ "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
}
Collection<? extends BoundedWindow> windows = this.element.getWindows();
@@ -211,7 +211,7 @@ public abstract class FlinkAbstractParDoWrapper<IN, OUTDF, OUTFL> extends RichFl
throw new IllegalArgumentException(String.format(
"Cannot output with timestamp %s. Output timestamps must be no earlier than the "
+ "timestamp of the current input (%s) minus the allowed skew (%s). See the "
- + "DoFn#getAllowedTimestmapSkew() Javadoc for details on changing the allowed skew.",
+ + "OldDoFn#getAllowedTimestmapSkew() Javadoc for details on changing the allowed skew.",
timestamp, ref.getTimestamp(),
PeriodFormat.getDefault().print(doFn.getAllowedTimestampSkew().toPeriod())));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
index 0e977db..103a12b 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java
@@ -36,7 +36,7 @@ import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFn;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
@@ -112,7 +112,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
private transient CoderRegistry coderRegistry;
- private DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> operator;
+ private OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> operator;
private ProcessContext context;
@@ -263,7 +263,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
* a function with that combiner is created, so that elements are combined as they arrive. This is
* done for speed and (in most of the cases) for reduction of the per-window state.
*/
- private <W extends BoundedWindow> DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> createGroupAlsoByWindowOperator() {
+ private <W extends BoundedWindow> OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> createGroupAlsoByWindowOperator() {
if (this.operator == null) {
StateInternalsFactory<K> stateInternalsFactory = new GroupAlsoByWindowWrapperStateInternalsFactory();
@@ -272,7 +272,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
// Thus VOUT == Iterable<VIN>
Coder<VIN> inputValueCoder = inputKvCoder.getValueCoder();
- this.operator = (DoFn) GroupAlsoByWindowViaWindowSetDoFn.create(
+ this.operator = (OldDoFn) GroupAlsoByWindowViaWindowSetDoFn.create(
(WindowingStrategy<?, W>) this.windowingStrategy, stateInternalsFactory, SystemReduceFn.<K, VIN, W>buffering(inputValueCoder));
} else {
Coder<K> inputKeyCoder = inputKvCoder.getKeyCoder();
@@ -446,7 +446,7 @@ public class FlinkGroupAlsoByWindowWrapper<K, VIN, VACC, VOUT>
private KeyedWorkItem<K, VIN> element;
- public ProcessContext(DoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> function,
+ public ProcessContext(OldDoFn<KeyedWorkItem<K, VIN>, KV<K, VOUT>> function,
TimestampedCollector<WindowedValue<KV<K, VOUT>>> outCollector,
FlinkTimerInternals timerInternals) {
function.super();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
index 619b887..0ea0cab 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming;
import static com.google.common.base.Preconditions.checkNotNull;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingInternals;
@@ -40,7 +40,7 @@ public class FlinkParDoBoundMultiWrapper<IN, OUT> extends FlinkAbstractParDoWrap
private final TupleTag<?> mainTag;
private final Map<TupleTag<?>, Integer> outputLabels;
- public FlinkParDoBoundMultiWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUT> doFn, TupleTag<?> mainTag, Map<TupleTag<?>, Integer> tagsToLabels) {
+ public FlinkParDoBoundMultiWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUT> doFn, TupleTag<?> mainTag, Map<TupleTag<?>, Integer> tagsToLabels) {
super(options, windowingStrategy, doFn);
this.mainTag = checkNotNull(mainTag);
this.outputLabels = checkNotNull(tagsToLabels);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
index 4def0c6..6be94b2 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.TimerInternals;
@@ -41,7 +41,7 @@ import java.util.Collection;
* */
public class FlinkParDoBoundWrapper<IN, OUT> extends FlinkAbstractParDoWrapper<IN, OUT, OUT> {
- public FlinkParDoBoundWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, DoFn<IN, OUT> doFn) {
+ public FlinkParDoBoundWrapper(PipelineOptions options, WindowingStrategy<?, ?> windowingStrategy, OldDoFn<IN, OUT> doFn) {
super(options, windowingStrategy, doFn);
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
index 9e55002..a0b33f8 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java
@@ -19,7 +19,7 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming.state;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.TimerInternals;
@@ -106,7 +106,7 @@ public abstract class AbstractFlinkTimerInternals<K, VIN> implements TimerIntern
}
}
- public void encodeTimerInternals(DoFn.ProcessContext context,
+ public void encodeTimerInternals(OldDoFn.ProcessContext context,
StateCheckpointWriter writer,
KvCoder<K, VIN> kvCoder,
Coder<? extends BoundedWindow> windowCoder) throws IOException {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
index 61e219c..c24d91d 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
@@ -114,7 +114,7 @@ public class PipelineOptionsTest {
}
- private static class TestDoFn extends DoFn<Object, Object> {
+ private static class TestDoFn extends OldDoFn<Object, Object> {
@Override
public void processElement(ProcessContext c) throws Exception {
@@ -126,7 +126,7 @@ public class PipelineOptionsTest {
}
private static class TestParDoWrapper extends FlinkAbstractParDoWrapper {
- public TestParDoWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, DoFn doFn) {
+ public TestParDoWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, OldDoFn doFn) {
super(options, windowingStrategy, doFn);
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
index bb79b27..ca70096 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
@@ -72,7 +72,7 @@ public class ReadSourceITCase extends JavaProgramTestBase {
PCollection<String> result = p
.apply(CountingInput.upTo(10))
- .apply(ParDo.of(new DoFn<Long, String>() {
+ .apply(ParDo.of(new OldDoFn<Long, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element().toString());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
index fe71802..bc69f34 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java
@@ -20,7 +20,7 @@ package org.apache.beam.runners.flink;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import com.google.common.base.Joiner;
import org.apache.flink.streaming.util.StreamingProgramTestBase;
@@ -59,7 +59,7 @@ public class ReadSourceStreamingITCase extends StreamingProgramTestBase {
p
.apply(CountingInput.upTo(10))
- .apply(ParDo.of(new DoFn<Long, String>() {
+ .apply(ParDo.of(new OldDoFn<Long, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element().toString());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
index 1b55c61..ca183a8 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java
@@ -21,7 +21,7 @@ import org.apache.beam.runners.flink.FlinkTestPipeline;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
@@ -61,7 +61,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
compareResultsByLinesInMemory(Joiner.on('\n').join(EXPECTED_RESULT), resultPath);
}
- public static class ExtractUserAndTimestamp extends DoFn<KV<Integer, String>, String> {
+ public static class ExtractUserAndTimestamp extends OldDoFn<KV<Integer, String>, String> {
private static final long serialVersionUID = 0;
@Override
@@ -97,7 +97,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
.withAllowedLateness(Duration.ZERO)
.discardingFiredPanes())
- .apply(ParDo.of(new DoFn<String, KV<Void, String>>() {
+ .apply(ParDo.of(new OldDoFn<String, KV<Void, String>>() {
@Override
public void processElement(ProcessContext c) throws Exception {
String elem = c.element();
@@ -105,7 +105,7 @@ public class GroupByNullKeyTest extends StreamingProgramTestBase implements Seri
}
}))
.apply(GroupByKey.<Void, String>create())
- .apply(ParDo.of(new DoFn<KV<Void, Iterable<String>>, String>() {
+ .apply(ParDo.of(new OldDoFn<KV<Void, Iterable<String>>, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
KV<Void, Iterable<String>> elem = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
index 1efb42f..7912aee 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.Sessions;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,7 +103,7 @@ public class TopWikipediaSessionsITCase extends StreamingProgramTestBase impleme
- .apply(ParDo.of(new DoFn<TableRow, String>() {
+ .apply(ParDo.of(new OldDoFn<TableRow, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
TableRow row = c.element();
@@ -120,7 +120,7 @@ public class TopWikipediaSessionsITCase extends StreamingProgramTestBase impleme
.apply(Count.<String>perElement());
- PCollection<String> format = output.apply(ParDo.of(new DoFn<KV<String, Long>, String>() {
+ PCollection<String> format = output.apply(ParDo.of(new OldDoFn<KV<String, Long>, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
KV<String, Long> el = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
index 7fd203f..ac06b52 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
@@ -47,9 +47,9 @@ import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.runners.TransformTreeNode;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
@@ -82,7 +82,6 @@ import com.google.api.services.dataflow.model.WorkerPool;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -94,7 +93,6 @@ import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-
import javax.annotation.Nullable;
/**
@@ -1021,7 +1019,7 @@ public class DataflowPipelineTranslator {
}
private static void translateFn(
- DoFn fn,
+ OldDoFn fn,
WindowingStrategy windowingStrategy,
Iterable<PCollectionView<?>> sideInputs,
Coder inputCoder,
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index e7cc20e..d762d50 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -78,9 +78,9 @@ import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -143,7 +143,6 @@ import com.google.common.collect.Multimap;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
@@ -173,7 +172,6 @@ import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
-
import javax.annotation.Nullable;
/**
@@ -762,13 +760,14 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
PTransform<PCollection<T>, PCollection<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>>> {
/**
- * A {@link DoFn} that for each element outputs a {@code KV} structure suitable for
+ * A {@link OldDoFn} that for each element outputs a {@code KV} structure suitable for
* grouping by the hash of the window's byte representation and sorting the grouped values
* using the window's byte representation.
*/
@SystemDoFnInternal
private static class UseWindowHashAsKeyAndWindowAsSortKeyDoFn<T, W extends BoundedWindow>
- extends DoFn<T, KV<Integer, KV<W, WindowedValue<T>>>> implements DoFn.RequiresWindowAccess {
+ extends OldDoFn<T, KV<Integer, KV<W, WindowedValue<T>>>> implements
+ OldDoFn.RequiresWindowAccess {
private final IsmRecordCoder<?> ismCoderForHash;
private UseWindowHashAsKeyAndWindowAsSortKeyDoFn(IsmRecordCoder<?> ismCoderForHash) {
@@ -828,15 +827,15 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
extends PTransform<PCollection<T>, PCollectionView<T>> {
/**
- * A {@link DoFn} that outputs {@link IsmRecord}s. These records are structured as follows:
+ * A {@link OldDoFn} that outputs {@link IsmRecord}s. These records are structured as follows:
* <ul>
* <li>Key 1: Window
* <li>Value: Windowed value
* </ul>
*/
static class IsmRecordForSingularValuePerWindowDoFn<T, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
- IsmRecord<WindowedValue<T>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+ IsmRecord<WindowedValue<T>>> {
private final Coder<W> windowCoder;
IsmRecordForSingularValuePerWindowDoFn(Coder<W> windowCoder) {
@@ -902,8 +901,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
applyForSingleton(
DataflowRunner runner,
PCollection<T> input,
- DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
- IsmRecord<WindowedValue<FinalT>>> doFn,
+ OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+ IsmRecord<WindowedValue<FinalT>>> doFn,
boolean hasDefault,
FinalT defaultValue,
Coder<FinalT> defaultValueCoder) {
@@ -998,7 +997,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
static class BatchViewAsList<T>
extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
/**
- * A {@link DoFn} which creates {@link IsmRecord}s assuming that each element is within the
+ * A {@link OldDoFn} which creates {@link IsmRecord}s assuming that each element is within the
* global window. Each {@link IsmRecord} has
* <ul>
* <li>Key 1: Global window</li>
@@ -1008,7 +1007,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
*/
@SystemDoFnInternal
static class ToIsmRecordForGlobalWindowDoFn<T>
- extends DoFn<T, IsmRecord<WindowedValue<T>>> {
+ extends OldDoFn<T, IsmRecord<WindowedValue<T>>> {
long indexInBundle;
@Override
@@ -1030,7 +1029,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A {@link DoFn} which creates {@link IsmRecord}s comparing successive elements windows
+ * A {@link OldDoFn} which creates {@link IsmRecord}s comparing successive elements windows
* to locate the window boundaries. The {@link IsmRecord} has:
* <ul>
* <li>Key 1: Window</li>
@@ -1040,8 +1039,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
*/
@SystemDoFnInternal
static class ToIsmRecordForNonGlobalWindowDoFn<T, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
- IsmRecord<WindowedValue<T>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<T>>>>,
+ IsmRecord<WindowedValue<T>>> {
private final Coder<W> windowCoder;
ToIsmRecordForNonGlobalWindowDoFn(Coder<W> windowCoder) {
@@ -1174,7 +1173,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
/**
- * A {@link DoFn} which groups elements by window boundaries. For each group,
+ * A {@link OldDoFn} which groups elements by window boundaries. For each group,
* the group of elements is transformed into a {@link TransformedMap}.
* The transformed {@code Map<K, V>} is backed by a {@code Map<K, WindowedValue<V>>}
* and contains a function {@code WindowedValue<V> -> V}.
@@ -1188,10 +1187,10 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
* </ul>
*/
static class ToMapDoFn<K, V, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
- IsmRecord<WindowedValue<TransformedMap<K,
- WindowedValue<V>,
- V>>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
+ IsmRecord<WindowedValue<TransformedMap<K,
+ WindowedValue<V>,
+ V>>>> {
private final Coder<W> windowCoder;
ToMapDoFn(Coder<W> windowCoder) {
@@ -1358,8 +1357,8 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
@SystemDoFnInternal
private static class GroupByKeyHashAndSortByKeyAndWindowDoFn<K, V, W>
- extends DoFn<KV<K, V>, KV<Integer, KV<KV<K, W>, WindowedValue<V>>>>
- implements DoFn.RequiresWindowAccess {
+ extends OldDoFn<KV<K, V>, KV<Integer, KV<KV<K, W>, WindowedValue<V>>>>
+ implements OldDoFn.RequiresWindowAccess {
private final IsmRecordCoder<?> coder;
private GroupByKeyHashAndSortByKeyAndWindowDoFn(IsmRecordCoder<?> coder) {
@@ -1412,7 +1411,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A {@link DoFn} which creates {@link IsmRecord}s comparing successive elements windows
+ * A {@link OldDoFn} which creates {@link IsmRecord}s comparing successive elements windows
* and keys to locate window and key boundaries. The main output {@link IsmRecord}s have:
* <ul>
* <li>Key 1: Window</li>
@@ -1424,12 +1423,12 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
* <p>Additionally, we output all the unique keys per window seen to {@code outputForEntrySet}
* and the unique key count per window to {@code outputForSize}.
*
- * <p>Finally, if this DoFn has been requested to perform unique key checking, it will
+ * <p>Finally, if this OldDoFn has been requested to perform unique key checking, it will
* throw an {@link IllegalStateException} if more than one key per window is found.
*/
static class ToIsmRecordForMapLikeDoFn<K, V, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<KV<K, W>, WindowedValue<V>>>>,
- IsmRecord<WindowedValue<V>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<KV<K, W>, WindowedValue<V>>>>,
+ IsmRecord<WindowedValue<V>>> {
private final TupleTag<KV<Integer, KV<W, Long>>> outputForSize;
private final TupleTag<KV<Integer, KV<W, K>>> outputForEntrySet;
@@ -1557,7 +1556,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A {@link DoFn} which outputs a metadata {@link IsmRecord} per window of:
+ * A {@link OldDoFn} which outputs a metadata {@link IsmRecord} per window of:
* <ul>
* <li>Key 1: META key</li>
* <li>Key 2: window</li>
@@ -1565,11 +1564,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
* <li>Value: sum of values for window</li>
* </ul>
*
- * <p>This {@link DoFn} is meant to be used to compute the number of unique keys
+ * <p>This {@link OldDoFn} is meant to be used to compute the number of unique keys
* per window for map and multimap side inputs.
*/
static class ToIsmMetadataRecordForSizeDoFn<K, V, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, Long>>>, IsmRecord<WindowedValue<V>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, Long>>>, IsmRecord<WindowedValue<V>>> {
private final Coder<W> windowCoder;
ToIsmMetadataRecordForSizeDoFn(Coder<W> windowCoder) {
this.windowCoder = windowCoder;
@@ -1606,7 +1605,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A {@link DoFn} which outputs a metadata {@link IsmRecord} per window and key pair of:
+ * A {@link OldDoFn} which outputs a metadata {@link IsmRecord} per window and key pair of:
* <ul>
* <li>Key 1: META key</li>
* <li>Key 2: window</li>
@@ -1614,11 +1613,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
* <li>Value: key</li>
* </ul>
*
- * <p>This {@link DoFn} is meant to be used to output index to key records
+ * <p>This {@link OldDoFn} is meant to be used to output index to key records
* per window for map and multimap side inputs.
*/
static class ToIsmMetadataRecordForKeyDoFn<K, V, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, K>>>, IsmRecord<WindowedValue<V>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, K>>>, IsmRecord<WindowedValue<V>>> {
private final Coder<K> keyCoder;
private final Coder<W> windowCoder;
@@ -1658,7 +1657,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A {@link DoFn} which partitions sets of elements by window boundaries. Within each
+ * A {@link OldDoFn} which partitions sets of elements by window boundaries. Within each
* partition, the set of elements is transformed into a {@link TransformedMap}.
* The transformed {@code Map<K, Iterable<V>>} is backed by a
* {@code Map<K, Iterable<WindowedValue<V>>>} and contains a function
@@ -1673,10 +1672,10 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
* </ul>
*/
static class ToMultimapDoFn<K, V, W extends BoundedWindow>
- extends DoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
- IsmRecord<WindowedValue<TransformedMap<K,
- Iterable<WindowedValue<V>>,
- Iterable<V>>>>> {
+ extends OldDoFn<KV<Integer, Iterable<KV<W, WindowedValue<KV<K, V>>>>>,
+ IsmRecord<WindowedValue<TransformedMap<K,
+ Iterable<WindowedValue<V>>,
+ Iterable<V>>>>> {
private final Coder<W> windowCoder;
ToMultimapDoFn(Coder<W> windowCoder) {
@@ -2335,7 +2334,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
// WindmillSink.
.apply(Reshuffle.<Integer, ValueWithRecordId<T>>of())
.apply("StripIds", ParDo.of(
- new DoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
+ new OldDoFn<KV<Integer, ValueWithRecordId<T>>, T>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getValue().getValue());
@@ -2372,11 +2371,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
/**
- * A specialized {@link DoFn} for writing the contents of a {@link PCollection}
+ * A specialized {@link OldDoFn} for writing the contents of a {@link PCollection}
* to a streaming {@link PCollectionView} backend implementation.
*/
private static class StreamingPCollectionViewWriterFn<T>
- extends DoFn<Iterable<T>, T> implements DoFn.RequiresWindowAccess {
+ extends OldDoFn<Iterable<T>, T> implements OldDoFn.RequiresWindowAccess {
private final PCollectionView<?> view;
private final Coder<T> dataCoder;
@@ -2553,7 +2552,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
}
}
- private static class WrapAsList<T> extends DoFn<T, List<T>> {
+ private static class WrapAsList<T> extends OldDoFn<T, List<T>> {
@Override
public void processElement(ProcessContext c) {
c.output(Arrays.asList(c.element()));
@@ -2716,7 +2715,7 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
@Nullable
private PTransform<?, ?> transform;
@Nullable
- private DoFn<?, ?> doFn;
+ private OldDoFn<?, ?> doFn;
/**
* Builds an instance of this class from the overridden transform.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
index 5f808a5..d4f9a90 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/internal/AssignWindows.java
@@ -18,7 +18,7 @@
package org.apache.beam.runners.dataflow.internal;
import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -63,9 +63,9 @@ public class AssignWindows<T> extends PTransform<PCollection<T>, PCollection<T>>
} else {
// If the windowFn didn't change, we just run a pass-through transform and then set the
// new windowing strategy.
- return input.apply("Identity", ParDo.of(new DoFn<T, T>() {
+ return input.apply("Identity", ParDo.of(new OldDoFn<T, T>() {
@Override
- public void processElement(DoFn<T, T>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<T, T>.ProcessContext c) throws Exception {
c.output(c.element());
}
})).setWindowingStrategyInternal(outputStrategy);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
index f83acbc..2017313 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/DoFnInfo.java
@@ -18,32 +18,32 @@
package org.apache.beam.runners.dataflow.util;
import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.PCollectionView;
import java.io.Serializable;
/**
- * Wrapper class holding the necessary information to serialize a DoFn.
+ * Wrapper class holding the necessary information to serialize a OldDoFn.
*
- * @param <InputT> the type of the (main) input elements of the DoFn
- * @param <OutputT> the type of the (main) output elements of the DoFn
+ * @param <InputT> the type of the (main) input elements of the OldDoFn
+ * @param <OutputT> the type of the (main) output elements of the OldDoFn
*/
public class DoFnInfo<InputT, OutputT> implements Serializable {
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private final WindowingStrategy<?, ?> windowingStrategy;
private final Iterable<PCollectionView<?>> sideInputViews;
private final Coder<InputT> inputCoder;
- public DoFnInfo(DoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy) {
+ public DoFnInfo(OldDoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy) {
this.doFn = doFn;
this.windowingStrategy = windowingStrategy;
this.sideInputViews = null;
this.inputCoder = null;
}
- public DoFnInfo(DoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy,
+ public DoFnInfo(OldDoFn<InputT, OutputT> doFn, WindowingStrategy<?, ?> windowingStrategy,
Iterable<PCollectionView<?>> sideInputViews, Coder<InputT> inputCoder) {
this.doFn = doFn;
this.windowingStrategy = windowingStrategy;
@@ -51,7 +51,7 @@ public class DoFnInfo<InputT, OutputT> implements Serializable {
this.inputCoder = inputCoder;
}
- public DoFn<InputT, OutputT> getDoFn() {
+ public OldDoFn<InputT, OutputT> getDoFn() {
return doFn;
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
index 7d89735..2a01c03 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslatorTest.java
@@ -49,7 +49,7 @@ import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -506,7 +506,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
}
/**
- * Returns a Step for a DoFn by creating and translating a pipeline.
+ * Returns a Step for a OldDoFn by creating and translating a pipeline.
*/
private static Step createPredefinedStep() throws Exception {
DataflowPipelineOptions options = buildPipelineOptions();
@@ -530,7 +530,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
return step;
}
- private static class NoOpFn extends DoFn<String, String> {
+ private static class NoOpFn extends OldDoFn<String, String> {
@Override public void processElement(ProcessContext c) throws Exception {
c.output(c.element());
}
@@ -864,7 +864,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);
Pipeline pipeline = Pipeline.create(options);
- DoFn<Integer, Integer> fn1 = new DoFn<Integer, Integer>() {
+ OldDoFn<Integer, Integer> fn1 = new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element());
@@ -880,7 +880,7 @@ public class DataflowPipelineTranslatorTest implements Serializable {
}
};
- DoFn<Integer, Integer> fn2 = new DoFn<Integer, Integer>() {
+ OldDoFn<Integer, Integer> fn2 = new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
index 4951043..0677030 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/examples/WordCount.java
@@ -25,8 +25,8 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SimpleFunction;
@@ -44,7 +44,7 @@ public class WordCount {
* of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
* pipeline.
*/
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
index b5888bd..f4ce516 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/DoFnFunction.java
@@ -19,7 +19,7 @@
package org.apache.beam.runners.spark.translation;
import org.apache.beam.runners.spark.util.BroadcastHelper;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.TupleTag;
@@ -39,7 +39,7 @@ import java.util.Map;
public class DoFnFunction<InputT, OutputT>
implements FlatMapFunction<Iterator<WindowedValue<InputT>>,
WindowedValue<OutputT>> {
- private final DoFn<InputT, OutputT> mFunction;
+ private final OldDoFn<InputT, OutputT> mFunction;
private final SparkRuntimeContext mRuntimeContext;
private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
@@ -48,7 +48,7 @@ public class DoFnFunction<InputT, OutputT>
* @param runtime Runtime to apply function in.
* @param sideInputs Side inputs used in DoFunction.
*/
- public DoFnFunction(DoFn<InputT, OutputT> fn,
+ public DoFnFunction(OldDoFn<InputT, OutputT> fn,
SparkRuntimeContext runtime,
Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
this.mFunction = fn;
@@ -69,7 +69,7 @@ public class DoFnFunction<InputT, OutputT>
private final List<WindowedValue<OutputT>> outputs = new LinkedList<>();
- ProcCtxt(DoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
+ ProcCtxt(OldDoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
BroadcastHelper<?>> sideInputs) {
super(fn, runtimeContext, sideInputs);
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
index daa767d..e33578d 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
@@ -19,7 +19,7 @@
package org.apache.beam.runners.spark.translation;
import org.apache.beam.runners.spark.util.BroadcastHelper;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.TupleTag;
@@ -45,13 +45,13 @@ import scala.Tuple2;
*/
class MultiDoFnFunction<InputT, OutputT>
implements PairFlatMapFunction<Iterator<WindowedValue<InputT>>, TupleTag<?>, WindowedValue<?>> {
- private final DoFn<InputT, OutputT> mFunction;
+ private final OldDoFn<InputT, OutputT> mFunction;
private final SparkRuntimeContext mRuntimeContext;
private final TupleTag<OutputT> mMainOutputTag;
private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
MultiDoFnFunction(
- DoFn<InputT, OutputT> fn,
+ OldDoFn<InputT, OutputT> fn,
SparkRuntimeContext runtimeContext,
TupleTag<OutputT> mainOutputTag,
Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
@@ -75,7 +75,7 @@ class MultiDoFnFunction<InputT, OutputT>
private final Multimap<TupleTag<?>, WindowedValue<?>> outputs = LinkedListMultimap.create();
- ProcCtxt(DoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
+ ProcCtxt(OldDoFn<InputT, OutputT> fn, SparkRuntimeContext runtimeContext, Map<TupleTag<?>,
BroadcastHelper<?>> sideInputs) {
super(fn, runtimeContext, sideInputs);
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
index cad2a8e..58ac03c 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.TimerInternals;
@@ -50,17 +50,17 @@ import java.util.Map;
* Spark runner process context.
*/
public abstract class SparkProcessContext<InputT, OutputT, ValueT>
- extends DoFn<InputT, OutputT>.ProcessContext {
+ extends OldDoFn<InputT, OutputT>.ProcessContext {
private static final Logger LOG = LoggerFactory.getLogger(SparkProcessContext.class);
- private final DoFn<InputT, OutputT> fn;
+ private final OldDoFn<InputT, OutputT> fn;
private final SparkRuntimeContext mRuntimeContext;
private final Map<TupleTag<?>, BroadcastHelper<?>> mSideInputs;
protected WindowedValue<InputT> windowedValue;
- SparkProcessContext(DoFn<InputT, OutputT> fn,
+ SparkProcessContext(OldDoFn<InputT, OutputT> fn,
SparkRuntimeContext runtime,
Map<TupleTag<?>, BroadcastHelper<?>> sideInputs) {
fn.super();
@@ -135,9 +135,9 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
@Override
public BoundedWindow window() {
- if (!(fn instanceof DoFn.RequiresWindowAccess)) {
+ if (!(fn instanceof OldDoFn.RequiresWindowAccess)) {
throw new UnsupportedOperationException(
- "window() is only available in the context of a DoFn marked as RequiresWindowAccess.");
+ "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
}
return Iterables.getOnlyElement(windowedValue.getWindows());
}
@@ -200,7 +200,7 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
protected abstract Iterator<ValueT> getOutputIterator();
protected Iterable<ValueT> getOutputIterable(final Iterator<WindowedValue<InputT>> iter,
- final DoFn<InputT, OutputT> doFn) {
+ final OldDoFn<InputT, OutputT> doFn) {
return new Iterable<ValueT>() {
@Override
public Iterator<ValueT> iterator() {
@@ -212,11 +212,11 @@ public abstract class SparkProcessContext<InputT, OutputT, ValueT>
private class ProcCtxtIterator extends AbstractIterator<ValueT> {
private final Iterator<WindowedValue<InputT>> inputIterator;
- private final DoFn<InputT, OutputT> doFn;
+ private final OldDoFn<InputT, OutputT> doFn;
private Iterator<ValueT> outputIterator;
private boolean calledFinish;
- ProcCtxtIterator(Iterator<WindowedValue<InputT>> iterator, DoFn<InputT, OutputT> doFn) {
+ ProcCtxtIterator(Iterator<WindowedValue<InputT>> iterator, OldDoFn<InputT, OutputT> doFn) {
this.inputIterator = iterator;
this.doFn = doFn;
this.outputIterator = getOutputIterator();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
index c5d5802..c51a500 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
@@ -39,8 +39,8 @@ import org.apache.beam.sdk.io.AvroIO;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
@@ -94,6 +94,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+
import scala.Tuple2;
/**
@@ -203,7 +204,7 @@ public final class TransformTranslator {
WindowingStrategy<?, W> windowingStrategy =
(WindowingStrategy<?, W>) transform.getWindowingStrategy();
- DoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<V>>> gabwDoFn =
+ OldDoFn<KV<K, Iterable<WindowedValue<V>>>, KV<K, Iterable<V>>> gabwDoFn =
new GroupAlsoByWindowsViaOutputBufferDoFn<K, V, Iterable<V>, W>(
windowingStrategy,
new InMemoryStateInternalsFactory<K>(),
@@ -768,7 +769,7 @@ public final class TransformTranslator {
&& windowFn instanceof GlobalWindows)) {
context.setOutputRDD(transform, inRDD);
} else {
- DoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
+ OldDoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
DoFnFunction<T, T> dofn =
new DoFnFunction<>(addWindowsDoFn, context.getRuntimeContext(), null);
context.setOutputRDD(transform, inRDD.mapPartitions(dofn));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
index 8154cd7..b0fb931 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
@@ -32,8 +32,8 @@ import org.apache.beam.sdk.io.AvroIO;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
@@ -315,7 +315,7 @@ public final class StreamingTransformTranslator {
sec.setStream(transform, dStream.window(windowDuration, slideDuration));
}
//--- then we apply windowing to the elements
- DoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
+ OldDoFn<T, T> addWindowsDoFn = new AssignWindowsDoFn<>(windowFn);
DoFnFunction<T, T> dofn = new DoFnFunction<>(addWindowsDoFn,
((StreamingEvaluationContext) context).getRuntimeContext(), null);
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
index d1f8d12..e4a293f 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/TfIdfTest.java
@@ -24,8 +24,8 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Keys;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.RemoveDuplicates;
@@ -101,7 +101,7 @@ public class TfIdfTest {
// of the words in the document associated with that that URI.
PCollection<KV<URI, String>> uriToWords = uriToContent
.apply("SplitWords", ParDo.of(
- new DoFn<KV<URI, String>, KV<URI, String>>() {
+ new OldDoFn<KV<URI, String>, KV<URI, String>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey();
@@ -144,7 +144,7 @@ public class TfIdfTest {
// by the URI key.
PCollection<KV<URI, KV<String, Long>>> uriToWordAndCount = uriAndWordToCount
.apply("ShiftKeys", ParDo.of(
- new DoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
+ new OldDoFn<KV<KV<URI, String>, Long>, KV<URI, KV<String, Long>>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey().getKey();
@@ -183,7 +183,7 @@ public class TfIdfTest {
// divided by the total number of words in the document.
PCollection<KV<String, KV<URI, Double>>> wordToUriAndTf = uriToWordAndCountAndTotal
.apply("ComputeTermFrequencies", ParDo.of(
- new DoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<URI, CoGbkResult>, KV<String, KV<URI, Double>>>() {
@Override
public void processElement(ProcessContext c) {
URI uri = c.element().getKey();
@@ -208,7 +208,7 @@ public class TfIdfTest {
PCollection<KV<String, Double>> wordToDf = wordToDocCount
.apply("ComputeDocFrequencies", ParDo
.withSideInputs(totalDocuments)
- .of(new DoFn<KV<String, Long>, KV<String, Double>>() {
+ .of(new OldDoFn<KV<String, Long>, KV<String, Double>>() {
@Override
public void processElement(ProcessContext c) {
String word = c.element().getKey();
@@ -237,7 +237,7 @@ public class TfIdfTest {
// divided by the log of the document frequency.
return wordToUriAndTfAndDf
.apply("ComputeTfIdf", ParDo.of(
- new DoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
+ new OldDoFn<KV<String, CoGbkResult>, KV<String, KV<URI, Double>>>() {
@Override
public void processElement(ProcessContext c) {
String word = c.element().getKey();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
index 600217d..2e477e9 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/CombinePerKeyTest.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.coders.VarLongCoder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -70,7 +70,7 @@ public class CombinePerKeyTest {
private static class SumPerKey<T> extends PTransform<PCollection<T>, PCollection<KV<T, Long>>> {
@Override
public PCollection<KV<T, Long>> apply(PCollection<T> pcol) {
- PCollection<KV<T, Long>> withLongs = pcol.apply(ParDo.of(new DoFn<T, KV<T, Long>>() {
+ PCollection<KV<T, Long>> withLongs = pcol.apply(ParDo.of(new OldDoFn<T, KV<T, Long>>() {
@Override
public void processElement(ProcessContext processContext) throws Exception {
processContext.output(KV.of(processContext.element(), 1L));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
index 0f60271..263ce99 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/DoFnOutputTest.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
@@ -46,7 +46,7 @@ public class DoFnOutputTest implements Serializable {
PCollection<String> strings = pipeline.apply(Create.of("a"));
// Test that values written from startBundle() and finishBundle() are written to
// the output
- PCollection<String> output = strings.apply(ParDo.of(new DoFn<String, String>() {
+ PCollection<String> output = strings.apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void startBundle(Context c) throws Exception {
c.output("start");
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
index ded3eb2..739eec3 100644
--- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/MultiOutputWordCountTest.java
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.ApproximateUnique;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.Max;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -103,9 +103,9 @@ public class MultiOutputWordCountTest {
}
/**
- * A DoFn that tokenizes lines of text into individual words.
+ * An OldDoFn that tokenizes lines of text into individual words.
*/
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Integer, Integer> totalWords = createAggregator("totalWords",
new Sum.SumIntegerFn());
@@ -170,7 +170,7 @@ public class MultiOutputWordCountTest {
}
}
- private static class FormatCountsFn extends DoFn<KV<String, Long>, String> {
+ private static class FormatCountsFn extends OldDoFn<KV<String, Long>, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey() + ": " + c.element().getValue());
[19/19] incubator-beam git commit: Closes #758
Posted by dh...@apache.org.
Closes #758
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/9a329aad
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/9a329aad
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/9a329aad
Branch: refs/heads/master
Commit: 9a329aada82d3ca7f619b88eddea04bdd329d992
Parents: 388816a 3466a0e
Author: Dan Halperin <dh...@google.com>
Authored: Wed Aug 3 18:25:53 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../beam/examples/DebuggingWordCount.java | 2 +-
.../apache/beam/examples/MinimalWordCount.java | 2 +-
.../apache/beam/examples/WindowedWordCount.java | 4 +-
.../org/apache/beam/examples/WordCount.java | 2 +-
.../examples/common/PubsubFileInjector.java | 6 +-
.../beam/examples/complete/AutoComplete.java | 14 +-
.../examples/complete/StreamingWordExtract.java | 12 +-
.../apache/beam/examples/complete/TfIdf.java | 16 +-
.../examples/complete/TopWikipediaSessions.java | 12 +-
.../examples/complete/TrafficMaxLaneFlow.java | 10 +-
.../beam/examples/complete/TrafficRoutes.java | 12 +-
.../examples/cookbook/BigQueryTornadoes.java | 6 +-
.../cookbook/CombinePerKeyExamples.java | 6 +-
.../examples/cookbook/DatastoreWordCount.java | 11 +-
.../beam/examples/cookbook/FilterExamples.java | 12 +-
.../beam/examples/cookbook/JoinExamples.java | 10 +-
.../examples/cookbook/MaxPerKeyExamples.java | 6 +-
.../beam/examples/cookbook/TriggerExample.java | 12 +-
.../org/apache/beam/examples/WordCountTest.java | 2 +-
.../examples/complete/AutoCompleteTest.java | 4 +-
.../examples/cookbook/TriggerExampleTest.java | 4 +-
.../beam/examples/complete/game/GameStats.java | 10 +-
.../beam/examples/complete/game/UserScore.java | 4 +-
.../complete/game/utils/WriteToBigQuery.java | 12 +-
.../game/utils/WriteWindowedToBigQuery.java | 8 +-
.../examples/complete/game/UserScoreTest.java | 2 +-
.../core/GroupAlsoByWindowViaWindowSetDoFn.java | 12 +-
.../core/UnboundedReadFromBoundedSource.java | 2 +-
.../apache/beam/sdk/util/AssignWindowsDoFn.java | 10 +-
.../org/apache/beam/sdk/util/DoFnRunner.java | 21 +-
.../apache/beam/sdk/util/DoFnRunnerBase.java | 54 +-
.../org/apache/beam/sdk/util/DoFnRunners.java | 24 +-
.../beam/sdk/util/GroupAlsoByWindowsDoFn.java | 6 +-
.../GroupAlsoByWindowsViaOutputBufferDoFn.java | 4 +-
.../sdk/util/GroupByKeyViaGroupByKeyOnly.java | 6 +-
.../sdk/util/LateDataDroppingDoFnRunner.java | 4 +-
.../apache/beam/sdk/util/PaneInfoTracker.java | 1 -
.../apache/beam/sdk/util/ReduceFnRunner.java | 4 +-
.../apache/beam/sdk/util/SimpleDoFnRunner.java | 12 +-
.../org/apache/beam/sdk/util/WatermarkHold.java | 1 -
.../beam/sdk/util/ReduceFnRunnerTest.java | 1 +
.../apache/beam/sdk/util/ReduceFnTester.java | 1 +
.../beam/sdk/util/SimpleDoFnRunnerTest.java | 6 +-
.../GroupAlsoByWindowEvaluatorFactory.java | 4 +-
.../ImmutabilityCheckingBundleFactory.java | 4 +-
.../beam/runners/direct/ParDoEvaluator.java | 4 +-
.../direct/ParDoMultiEvaluatorFactory.java | 11 +-
.../direct/ParDoSingleEvaluatorFactory.java | 11 +-
.../direct/TransformEvaluatorFactory.java | 6 +-
.../direct/WriteWithShardingFactory.java | 4 +-
.../ConsumerTrackingPipelineVisitorTest.java | 22 +-
.../beam/runners/direct/DirectRunnerTest.java | 24 +-
.../ImmutabilityCheckingBundleFactoryTest.java | 6 +-
.../ImmutabilityEnforcementFactoryTest.java | 6 +-
.../direct/KeyedPValueTrackingVisitorTest.java | 6 +-
.../beam/runners/direct/ParDoEvaluatorTest.java | 6 +-
.../direct/ParDoMultiEvaluatorFactoryTest.java | 10 +-
.../direct/ParDoSingleEvaluatorFactoryTest.java | 10 +-
.../runners/direct/WatermarkManagerTest.java | 7 +-
.../beam/runners/flink/examples/TFIDF.java | 16 +-
.../beam/runners/flink/examples/WordCount.java | 4 +-
.../flink/examples/streaming/AutoComplete.java | 16 +-
.../flink/examples/streaming/JoinExamples.java | 8 +-
.../examples/streaming/KafkaIOExamples.java | 4 +-
.../KafkaWindowedWordCountExample.java | 6 +-
.../examples/streaming/WindowedWordCount.java | 6 +-
.../FlinkBatchTransformTranslators.java | 12 +-
.../FlinkStreamingTransformTranslators.java | 9 +-
.../functions/FlinkDoFnFunction.java | 10 +-
.../FlinkMergingNonShuffleReduceFunction.java | 8 +-
.../functions/FlinkMultiOutputDoFnFunction.java | 10 +-
.../FlinkMultiOutputProcessContext.java | 6 +-
.../functions/FlinkNoElementAssignContext.java | 8 +-
.../functions/FlinkPartialReduceFunction.java | 8 +-
.../functions/FlinkProcessContext.java | 16 +-
.../functions/FlinkReduceFunction.java | 8 +-
.../streaming/FlinkAbstractParDoWrapper.java | 18 +-
.../FlinkGroupAlsoByWindowWrapper.java | 10 +-
.../streaming/FlinkParDoBoundMultiWrapper.java | 4 +-
.../streaming/FlinkParDoBoundWrapper.java | 4 +-
.../state/AbstractFlinkTimerInternals.java | 4 +-
.../beam/runners/flink/PipelineOptionsTest.java | 6 +-
.../beam/runners/flink/ReadSourceITCase.java | 4 +-
.../flink/ReadSourceStreamingITCase.java | 4 +-
.../flink/streaming/GroupByNullKeyTest.java | 8 +-
.../streaming/TopWikipediaSessionsITCase.java | 6 +-
.../dataflow/DataflowPipelineTranslator.java | 6 +-
.../beam/runners/dataflow/DataflowRunner.java | 87 ++-
.../dataflow/internal/AssignWindows.java | 6 +-
.../beam/runners/dataflow/util/DoFnInfo.java | 16 +-
.../DataflowPipelineTranslatorTest.java | 10 +-
.../beam/runners/spark/examples/WordCount.java | 4 +-
.../runners/spark/translation/DoFnFunction.java | 8 +-
.../spark/translation/MultiDoFnFunction.java | 8 +-
.../spark/translation/SparkProcessContext.java | 18 +-
.../spark/translation/TransformTranslator.java | 7 +-
.../streaming/StreamingTransformTranslator.java | 4 +-
.../apache/beam/runners/spark/TfIdfTest.java | 12 +-
.../spark/translation/CombinePerKeyTest.java | 4 +-
.../spark/translation/DoFnOutputTest.java | 4 +-
.../translation/MultiOutputWordCountTest.java | 8 +-
.../spark/translation/SerializationTest.java | 10 +-
.../spark/translation/SideEffectsTest.java | 4 +-
.../streaming/KafkaStreamingTest.java | 4 +-
.../org/apache/beam/sdk/coders/AvroCoder.java | 1 -
.../apache/beam/sdk/coders/DurationCoder.java | 1 -
.../apache/beam/sdk/coders/InstantCoder.java | 1 -
.../java/org/apache/beam/sdk/io/PubsubIO.java | 6 +-
.../apache/beam/sdk/io/PubsubUnboundedSink.java | 8 +-
.../beam/sdk/io/PubsubUnboundedSource.java | 4 +-
.../java/org/apache/beam/sdk/io/Source.java | 2 +-
.../main/java/org/apache/beam/sdk/io/Write.java | 21 +-
.../org/apache/beam/sdk/options/GcpOptions.java | 1 -
.../beam/sdk/options/PipelineOptions.java | 8 +-
.../sdk/options/PipelineOptionsFactory.java | 1 -
.../sdk/options/PipelineOptionsReflector.java | 1 +
.../beam/sdk/runners/AggregatorValues.java | 4 +-
.../org/apache/beam/sdk/testing/PAssert.java | 24 +-
.../beam/sdk/testing/SerializableMatchers.java | 1 -
.../apache/beam/sdk/testing/TestPipeline.java | 1 -
.../beam/sdk/testing/TestPipelineOptions.java | 1 +
.../apache/beam/sdk/transforms/Aggregator.java | 14 +-
.../sdk/transforms/AggregatorRetriever.java | 6 +-
.../org/apache/beam/sdk/transforms/Combine.java | 14 +-
.../apache/beam/sdk/transforms/CombineFns.java | 4 +-
.../org/apache/beam/sdk/transforms/Count.java | 2 +-
.../org/apache/beam/sdk/transforms/Create.java | 2 +-
.../org/apache/beam/sdk/transforms/DoFn.java | 418 +++++---------
.../beam/sdk/transforms/DoFnReflector.java | 116 ++--
.../apache/beam/sdk/transforms/DoFnTester.java | 88 +--
.../beam/sdk/transforms/DoFnWithContext.java | 429 --------------
.../org/apache/beam/sdk/transforms/Filter.java | 2 +-
.../beam/sdk/transforms/FlatMapElements.java | 2 +-
.../org/apache/beam/sdk/transforms/Flatten.java | 2 +-
.../apache/beam/sdk/transforms/GroupByKey.java | 2 +-
.../transforms/IntraBundleParallelization.java | 40 +-
.../org/apache/beam/sdk/transforms/Keys.java | 2 +-
.../org/apache/beam/sdk/transforms/KvSwap.java | 2 +-
.../apache/beam/sdk/transforms/MapElements.java | 2 +-
.../org/apache/beam/sdk/transforms/OldDoFn.java | 565 +++++++++++++++++++
.../apache/beam/sdk/transforms/PTransform.java | 2 +-
.../org/apache/beam/sdk/transforms/ParDo.java | 219 +++----
.../apache/beam/sdk/transforms/Partition.java | 2 +-
.../beam/sdk/transforms/RemoveDuplicates.java | 2 +-
.../org/apache/beam/sdk/transforms/Sample.java | 4 +-
.../beam/sdk/transforms/SimpleFunction.java | 6 +-
.../org/apache/beam/sdk/transforms/Values.java | 2 +-
.../org/apache/beam/sdk/transforms/View.java | 8 +-
.../apache/beam/sdk/transforms/WithKeys.java | 2 +-
.../beam/sdk/transforms/WithTimestamps.java | 4 +-
.../sdk/transforms/display/DisplayData.java | 1 -
.../beam/sdk/transforms/join/CoGbkResult.java | 1 -
.../beam/sdk/transforms/join/CoGroupByKey.java | 14 +-
.../sdk/transforms/windowing/AfterEach.java | 1 +
.../windowing/AfterProcessingTime.java | 1 +
.../transforms/windowing/IntervalWindow.java | 1 -
.../beam/sdk/transforms/windowing/Never.java | 1 +
.../beam/sdk/transforms/windowing/PaneInfo.java | 10 +-
.../beam/sdk/transforms/windowing/Window.java | 4 +-
.../beam/sdk/util/BaseExecutionContext.java | 4 +-
.../apache/beam/sdk/util/BucketingFunction.java | 1 +
.../beam/sdk/util/CombineContextFactory.java | 6 +-
.../apache/beam/sdk/util/ExecutionContext.java | 8 +-
.../apache/beam/sdk/util/MovingFunction.java | 1 +
.../beam/sdk/util/PerKeyCombineFnRunner.java | 44 +-
.../beam/sdk/util/PerKeyCombineFnRunners.java | 30 +-
.../org/apache/beam/sdk/util/PubsubClient.java | 1 +
.../apache/beam/sdk/util/PubsubTestClient.java | 1 +
.../sdk/util/ReifyTimestampAndWindowsDoFn.java | 6 +-
.../org/apache/beam/sdk/util/Reshuffle.java | 4 +-
.../apache/beam/sdk/util/SerializableUtils.java | 2 +-
.../org/apache/beam/sdk/util/StringUtils.java | 2 +-
.../beam/sdk/util/SystemDoFnInternal.java | 6 +-
.../apache/beam/sdk/util/TimerInternals.java | 1 -
.../apache/beam/sdk/util/ValueWithRecordId.java | 6 +-
.../org/apache/beam/sdk/util/WindowedValue.java | 1 -
.../beam/sdk/util/WindowingInternals.java | 4 +-
.../beam/sdk/util/common/ReflectHelpers.java | 1 +
.../beam/sdk/values/TimestampedValue.java | 1 -
.../java/org/apache/beam/sdk/PipelineTest.java | 6 +-
.../apache/beam/sdk/coders/AvroCoderTest.java | 4 +-
.../beam/sdk/coders/CoderRegistryTest.java | 6 +-
.../beam/sdk/coders/SerializableCoderTest.java | 6 +-
.../org/apache/beam/sdk/io/AvroSourceTest.java | 1 +
.../io/BoundedReadFromUnboundedSourceTest.java | 1 +
.../beam/sdk/io/CompressedSourceTest.java | 1 +
.../apache/beam/sdk/io/CountingInputTest.java | 5 +-
.../apache/beam/sdk/io/CountingSourceTest.java | 4 +-
.../beam/sdk/io/OffsetBasedSourceTest.java | 1 +
.../beam/sdk/io/PubsubUnboundedSinkTest.java | 4 +-
.../java/org/apache/beam/sdk/io/ReadTest.java | 1 +
.../java/org/apache/beam/sdk/io/TextIOTest.java | 1 +
.../java/org/apache/beam/sdk/io/WriteTest.java | 7 +-
.../org/apache/beam/sdk/io/XmlSinkTest.java | 1 +
.../apache/beam/sdk/options/GcpOptionsTest.java | 1 +
.../sdk/options/GoogleApiDebugOptionsTest.java | 1 -
.../sdk/options/PipelineOptionsFactoryTest.java | 1 -
.../beam/sdk/options/PipelineOptionsTest.java | 1 -
.../sdk/options/ProxyInvocationHandlerTest.java | 2 +-
.../AggregatorPipelineExtractorTest.java | 6 +-
.../apache/beam/sdk/testing/PAssertTest.java | 1 -
.../beam/sdk/testing/TestPipelineTest.java | 1 -
.../transforms/ApproximateQuantilesTest.java | 1 +
.../sdk/transforms/ApproximateUniqueTest.java | 5 +-
.../beam/sdk/transforms/CombineFnsTest.java | 2 +-
.../apache/beam/sdk/transforms/CombineTest.java | 12 +-
.../apache/beam/sdk/transforms/CreateTest.java | 2 +-
.../beam/sdk/transforms/DoFnContextTest.java | 69 ---
.../DoFnDelegatingAggregatorTest.java | 16 +-
.../beam/sdk/transforms/DoFnReflectorTest.java | 88 +--
.../apache/beam/sdk/transforms/DoFnTest.java | 131 +++--
.../beam/sdk/transforms/DoFnTesterTest.java | 10 +-
.../sdk/transforms/DoFnWithContextTest.java | 237 --------
.../apache/beam/sdk/transforms/FlattenTest.java | 4 +-
.../beam/sdk/transforms/GroupByKeyTest.java | 6 +-
.../IntraBundleParallelizationTest.java | 23 +-
.../beam/sdk/transforms/MapElementsTest.java | 1 +
.../org/apache/beam/sdk/transforms/MaxTest.java | 1 +
.../org/apache/beam/sdk/transforms/MinTest.java | 2 +
.../apache/beam/sdk/transforms/NoOpDoFn.java | 144 -----
.../apache/beam/sdk/transforms/NoOpOldDoFn.java | 144 +++++
.../beam/sdk/transforms/OldDoFnContextTest.java | 69 +++
.../apache/beam/sdk/transforms/OldDoFnTest.java | 242 ++++++++
.../apache/beam/sdk/transforms/ParDoTest.java | 108 ++--
.../beam/sdk/transforms/PartitionTest.java | 1 +
.../apache/beam/sdk/transforms/SampleTest.java | 1 +
.../org/apache/beam/sdk/transforms/TopTest.java | 1 +
.../apache/beam/sdk/transforms/ViewTest.java | 398 ++++++-------
.../beam/sdk/transforms/WithTimestampsTest.java | 8 +-
.../display/DisplayDataEvaluatorTest.java | 6 +-
.../display/DisplayDataMatchersTest.java | 1 +
.../sdk/transforms/display/DisplayDataTest.java | 6 +-
.../dofnreflector/DoFnReflectorTestHelper.java | 26 +-
.../sdk/transforms/join/CoGroupByKeyTest.java | 18 +-
.../sdk/transforms/windowing/NeverTest.java | 1 +
.../sdk/transforms/windowing/WindowTest.java | 6 +-
.../sdk/transforms/windowing/WindowingTest.java | 10 +-
.../beam/sdk/util/BucketingFunctionTest.java | 4 +-
.../beam/sdk/util/MovingFunctionTest.java | 4 +-
.../beam/sdk/util/SerializableUtilsTest.java | 1 -
.../apache/beam/sdk/util/SerializerTest.java | 1 -
.../apache/beam/sdk/util/StringUtilsTest.java | 16 +-
.../org/apache/beam/sdk/util/TriggerTester.java | 1 +
.../beam/sdk/util/common/CounterTest.java | 1 +
.../beam/sdk/values/PCollectionTupleTest.java | 4 +-
.../apache/beam/sdk/values/TypedPValueTest.java | 6 +-
.../beam/sdk/extensions/joinlibrary/Join.java | 8 +-
.../beam/sdk/io/gcp/bigquery/BigQueryIO.java | 18 +-
.../beam/sdk/io/gcp/bigtable/BigtableIO.java | 4 +-
.../beam/sdk/io/gcp/datastore/V1Beta3.java | 13 +-
.../sdk/io/gcp/bigquery/BigQueryIOTest.java | 6 +-
.../sdk/io/gcp/bigtable/BigtableWriteIT.java | 4 +-
.../sdk/io/gcp/datastore/V1Beta3TestUtil.java | 6 +-
.../java/org/apache/beam/sdk/io/jms/JmsIO.java | 4 +-
.../org/apache/beam/sdk/io/kafka/KafkaIO.java | 8 +-
.../apache/beam/sdk/io/kafka/KafkaIOTest.java | 7 +-
.../sdk/transforms/WithTimestampsJava8Test.java | 4 +-
.../src/main/java/DebuggingWordCount.java | 4 +-
.../src/main/java/MinimalWordCount.java | 6 +-
.../src/main/java/WindowedWordCount.java | 6 +-
.../src/main/java/WordCount.java | 6 +-
.../main/java/common/PubsubFileInjector.java | 4 +-
.../src/main/java/StarterPipeline.java | 6 +-
.../src/main/java/it/pkg/StarterPipeline.java | 6 +-
.../transforms/DoFnReflectorBenchmark.java | 49 +-
265 files changed, 2641 insertions(+), 2596 deletions(-)
----------------------------------------------------------------------
[06/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
index 3b314b2..8b00c03 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnWithContextTest.java
@@ -142,9 +142,9 @@ public class DoFnWithContextTest implements Serializable {
@Test
public void testDoFnWithContextUsingAggregators() {
NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
- DoFn<Object, Object>.Context context = noOpFn.context();
+ OldDoFn<Object, Object>.Context context = noOpFn.context();
- DoFn<Object, Object> fn = spy(noOpFn);
+ OldDoFn<Object, Object> fn = spy(noOpFn);
context = spy(context);
@SuppressWarnings("unchecked")
@@ -225,7 +225,7 @@ public class DoFnWithContextTest implements Serializable {
}
/**
- * Initialize a test pipeline with the specified {@link DoFn}.
+ * Initialize a test pipeline with the specified {@link OldDoFn}.
*/
private <InputT, OutputT> TestPipeline createTestPipeline(DoFnWithContext<InputT, OutputT> fn) {
TestPipeline pipeline = TestPipeline.create();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
index 80825cb..b81eedb 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java
@@ -130,7 +130,7 @@ public class FlattenTest implements Serializable {
PCollection<String> output = p
.apply(Create.of((Void) null).withCoder(VoidCoder.of()))
- .apply(ParDo.withSideInputs(view).of(new DoFn<Void, String>() {
+ .apply(ParDo.withSideInputs(view).of(new OldDoFn<Void, String>() {
@Override
public void processElement(ProcessContext c) {
for (String side : c.sideInput(view)) {
@@ -339,7 +339,7 @@ public class FlattenTest implements Serializable {
/////////////////////////////////////////////////////////////////////////////
- private static class IdentityFn<T> extends DoFn<T, T> {
+ private static class IdentityFn<T> extends OldDoFn<T, T> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
index d6e4589..15c3ba8 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.TestUtils.KvMatcher.isKv;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.Matchers.empty;
@@ -55,7 +56,6 @@ import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Assert;
@@ -371,7 +371,7 @@ public class GroupByKeyTest {
pipeline.run();
}
- private static class AssertTimestamp<K, V> extends DoFn<KV<K, V>, Void> {
+ private static class AssertTimestamp<K, V> extends OldDoFn<KV<K, V>, Void> {
private final Instant timestamp;
public AssertTimestamp(Instant timestamp) {
@@ -506,7 +506,7 @@ public class GroupByKeyTest {
* Creates a KV that wraps the original KV together with a random key.
*/
static class AssignRandomKey
- extends DoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
+ extends OldDoFn<KV<BadEqualityKey, Long>, KV<Long, KV<BadEqualityKey, Long>>> {
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
index 3355aeb..fa2fae9 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java
@@ -20,6 +20,7 @@ package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.testing.SystemNanoTimeSleeper.sleepMillis;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -74,7 +75,7 @@ public class IntraBundleParallelizationTest {
/**
* Introduces a delay in processing, then passes thru elements.
*/
- private static class DelayFn<T> extends DoFn<T, T> {
+ private static class DelayFn<T> extends OldDoFn<T, T> {
public static final long DELAY_MS = 25;
@Override
@@ -94,7 +95,7 @@ public class IntraBundleParallelizationTest {
/**
* Throws an exception after some number of calls.
*/
- private static class ExceptionThrowingFn<T> extends DoFn<T, T> {
+ private static class ExceptionThrowingFn<T> extends OldDoFn<T, T> {
private ExceptionThrowingFn(int numSuccesses) {
IntraBundleParallelizationTest.numSuccesses.set(numSuccesses);
}
@@ -120,11 +121,11 @@ public class IntraBundleParallelizationTest {
/**
* Measures concurrency of the processElement method.
*/
- private static class ConcurrencyMeasuringFn<T> extends DoFn<T, T> {
+ private static class ConcurrencyMeasuringFn<T> extends OldDoFn<T, T> {
@Override
public void processElement(ProcessContext c) {
// Synchronize on the class to provide synchronous access irrespective of
- // how this DoFn is called.
+ // how this OldDoFn is called.
synchronized (ConcurrencyMeasuringFn.class) {
concurrentElements++;
if (concurrentElements > maxDownstreamConcurrency) {
@@ -154,8 +155,8 @@ public class IntraBundleParallelizationTest {
}
/**
- * Test that the DoFn is parallelized up the the Max Parallelism factor within a bundle, but not
- * greater than that amount.
+ * Test that the OldDoFn is parallelized up the the Max Parallelism factor within a bundle, but
+ * not greater than that amount.
*/
@Test
@Category(NeedsRunner.class)
@@ -224,7 +225,7 @@ public class IntraBundleParallelizationTest {
@Test
public void testDisplayData() {
- DoFn<String, String> fn = new DoFn<String, String>() {
+ OldDoFn<String, String> fn = new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
}
@@ -248,15 +249,15 @@ public class IntraBundleParallelizationTest {
/**
* Runs the provided doFn inside of an {@link IntraBundleParallelization} transform.
*
- * <p>This method assumes that the DoFn passed to it will call {@link #startConcurrentCall()}
+ * <p>This method assumes that the OldDoFn passed to it will call {@link #startConcurrentCall()}
* before processing each elements and {@link #finishConcurrentCall()} after each element.
*
* @param numElements the size of the input
* @param maxParallelism how many threads to execute in parallel
- * @param doFn the DoFn to execute
- * @return the maximum observed parallelism of the DoFn
+ * @param doFn the OldDoFn to execute
+ * @return the maximum observed parallelism of the OldDoFn
*/
- private int run(int numElements, int maxParallelism, DoFn<Integer, Integer> doFn) {
+ private int run(int numElements, int maxParallelism, OldDoFn<Integer, Integer> doFn) {
Pipeline pipeline = TestPipeline.create();
ArrayList<Integer> data = new ArrayList<>(numElements);
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
index f18504c..b4751d2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
index 226255a..87fa554 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java
@@ -19,6 +19,7 @@ package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.TestUtils.checkCombineFn;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
index d7ec322..cd03a74 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java
@@ -20,10 +20,12 @@ package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.TestUtils.checkCombineFn;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import org.apache.beam.sdk.transforms.display.DisplayData;
+
import com.google.common.collect.Lists;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
index a389fac..5c43755 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpDoFn.java
@@ -28,35 +28,35 @@ import org.apache.beam.sdk.values.TupleTag;
import org.joda.time.Instant;
/**
- * A {@link DoFn} that does nothing with provided elements. Used for testing
- * methods provided by the DoFn abstract class.
+ * A {@link OldDoFn} that does nothing with provided elements. Used for testing
+ * methods provided by the OldDoFn abstract class.
*
* @param <InputT> unused.
* @param <OutputT> unused.
*/
-class NoOpDoFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
+class NoOpDoFn<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
@Override
- public void processElement(DoFn<InputT, OutputT>.ProcessContext c) throws Exception {
+ public void processElement(OldDoFn<InputT, OutputT>.ProcessContext c) throws Exception {
}
/**
* Returns a new NoOp Context.
*/
- public DoFn<InputT, OutputT>.Context context() {
+ public OldDoFn<InputT, OutputT>.Context context() {
return new NoOpDoFnContext();
}
/**
* Returns a new NoOp Process Context.
*/
- public DoFn<InputT, OutputT>.ProcessContext processContext() {
+ public OldDoFn<InputT, OutputT>.ProcessContext processContext() {
return new NoOpDoFnProcessContext();
}
/**
- * A {@link DoFn.Context} that does nothing and returns exclusively null.
+ * A {@link OldDoFn.Context} that does nothing and returns exclusively null.
*/
- private class NoOpDoFnContext extends DoFn<InputT, OutputT>.Context {
+ private class NoOpDoFnContext extends OldDoFn<InputT, OutputT>.Context {
@Override
public PipelineOptions getPipelineOptions() {
return null;
@@ -82,10 +82,10 @@ class NoOpDoFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
}
/**
- * A {@link DoFn.ProcessContext} that does nothing and returns exclusively
+ * A {@link OldDoFn.ProcessContext} that does nothing and returns exclusively
* null.
*/
- private class NoOpDoFnProcessContext extends DoFn<InputT, OutputT>.ProcessContext {
+ private class NoOpDoFnProcessContext extends OldDoFn<InputT, OutputT>.ProcessContext {
@Override
public InputT element() {
return null;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
new file mode 100644
index 0000000..9234ccb
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnContextTest.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/**
+ * Tests for {@link OldDoFn.Context}.
+ */
+@RunWith(JUnit4.class)
+public class OldDoFnContextTest {
+
+ @Mock
+ private Aggregator<Long, Long> agg;
+
+ private OldDoFn<Object, Object> fn;
+ private OldDoFn<Object, Object>.Context context;
+
+ @Before
+ public void setup() {
+ MockitoAnnotations.initMocks(this);
+
+ // Need to be real objects to call the constructor, and to reference the
+ // outer instance of OldDoFn
+ NoOpDoFn<Object, Object> noOpFn = new NoOpDoFn<>();
+ OldDoFn<Object, Object>.Context noOpContext = noOpFn.context();
+
+ fn = spy(noOpFn);
+ context = spy(noOpContext);
+ }
+
+ @Test
+ public void testSetupDelegateAggregatorsCreatesAndLinksDelegateAggregators() {
+ Sum.SumLongFn combiner = new Sum.SumLongFn();
+ Aggregator<Long, Long> delegateAggregator =
+ fn.createAggregator("test", combiner);
+
+ when(context.createAggregatorInternal("test", combiner)).thenReturn(agg);
+
+ context.setupDelegateAggregators();
+ delegateAggregator.addValue(1L);
+
+ verify(agg).addValue(1L);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
new file mode 100644
index 0000000..49f4366
--- /dev/null
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.transforms;
+
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isA;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
+import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.runners.AggregatorValues;
+import org.apache.beam.sdk.testing.NeedsRunner;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Combine.CombineFn;
+import org.apache.beam.sdk.transforms.Max.MaxIntegerFn;
+import org.apache.beam.sdk.transforms.Sum.SumIntegerFn;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+
+import com.google.common.collect.ImmutableMap;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Tests for OldDoFn.
+ */
+@RunWith(JUnit4.class)
+public class OldDoFnTest implements Serializable {
+
+ @Rule
+ public transient ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void testCreateAggregatorWithCombinerSucceeds() {
+ String name = "testAggregator";
+ Sum.SumLongFn combiner = new Sum.SumLongFn();
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ Aggregator<Long, Long> aggregator = doFn.createAggregator(name, combiner);
+
+ assertEquals(name, aggregator.getName());
+ assertEquals(combiner, aggregator.getCombineFn());
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullNameThrowsException() {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("name cannot be null");
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ doFn.createAggregator(null, new Sum.SumLongFn());
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullCombineFnThrowsException() {
+ CombineFn<Object, Object, Object> combiner = null;
+
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("combiner cannot be null");
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ doFn.createAggregator("testAggregator", combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorWithNullSerializableFnThrowsException() {
+ SerializableFunction<Iterable<Object>, Object> combiner = null;
+
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("combiner cannot be null");
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ doFn.createAggregator("testAggregator", combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorWithSameNameThrowsException() {
+ String name = "testAggregator";
+ CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ doFn.createAggregator(name, combiner);
+
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Cannot create");
+ thrown.expectMessage(name);
+ thrown.expectMessage("already exists");
+
+ doFn.createAggregator(name, combiner);
+ }
+
+ @Test
+ public void testCreateAggregatorsWithDifferentNamesSucceeds() {
+ String nameOne = "testAggregator";
+ String nameTwo = "aggregatorPrime";
+ CombineFn<Double, ?, Double> combiner = new Max.MaxDoubleFn();
+
+ OldDoFn<Void, Void> doFn = new NoOpDoFn<>();
+
+ Aggregator<Double, Double> aggregatorOne =
+ doFn.createAggregator(nameOne, combiner);
+ Aggregator<Double, Double> aggregatorTwo =
+ doFn.createAggregator(nameTwo, combiner);
+
+ assertNotEquals(aggregatorOne, aggregatorTwo);
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInStartBundleThrows() {
+ TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+ @Override
+ public void startBundle(OldDoFn<String, String>.Context c) throws Exception {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+
+ @Override
+ public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {}
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInProcessElementThrows() {
+ TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+ @Override
+ public void processElement(ProcessContext c) throws Exception {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testCreateAggregatorInFinishBundleThrows() {
+ TestPipeline p = createTestPipeline(new OldDoFn<String, String>() {
+ @Override
+ public void finishBundle(OldDoFn<String, String>.Context c) throws Exception {
+ createAggregator("anyAggregate", new MaxIntegerFn());
+ }
+
+ @Override
+ public void processElement(OldDoFn<String, String>.ProcessContext c) throws Exception {}
+ });
+
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectCause(isA(IllegalStateException.class));
+
+ p.run();
+ }
+
+ /**
+ * Initialize a test pipeline with the specified {@link OldDoFn}.
+ */
+ private <InputT, OutputT> TestPipeline createTestPipeline(OldDoFn<InputT, OutputT> fn) {
+ TestPipeline pipeline = TestPipeline.create();
+ pipeline.apply(Create.of((InputT) null))
+ .apply(ParDo.of(fn));
+
+ return pipeline;
+ }
+
+ @Test
+ public void testPopulateDisplayDataDefaultBehavior() {
+ OldDoFn<String, String> usesDefault =
+ new OldDoFn<String, String>() {
+ @Override
+ public void processElement(ProcessContext c) throws Exception {}
+ };
+
+ DisplayData data = DisplayData.from(usesDefault);
+ assertThat(data.items(), empty());
+ }
+
+ @Test
+ @Category(NeedsRunner.class)
+ public void testAggregators() throws Exception {
+ Pipeline pipeline = TestPipeline.create();
+
+ CountOddsFn countOdds = new CountOddsFn();
+ pipeline
+ .apply(Create.of(1, 3, 5, 7, 2, 4, 6, 8, 10, 12, 14, 20, 42, 68, 100))
+ .apply(ParDo.of(countOdds));
+ PipelineResult result = pipeline.run();
+
+ AggregatorValues<Integer> values = result.getAggregatorValues(countOdds.aggregator);
+ assertThat(values.getValuesAtSteps(),
+ equalTo((Map<String, Integer>) ImmutableMap.<String, Integer>of("ParDo(CountOdds)", 4)));
+ }
+
+ private static class CountOddsFn extends OldDoFn<Integer, Void> {
+ @Override
+ public void processElement(ProcessContext c) throws Exception {
+ if (c.element() % 2 == 1) {
+ aggregator.addValue(1);
+ }
+ }
+
+ Aggregator<Integer, Integer> aggregator =
+ createAggregator("odds", new SumIntegerFn());
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index 868270c..0a6eab0 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -26,6 +26,7 @@ import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString;
import static org.apache.beam.sdk.util.StringUtils.jsonStringToByteArray;
import static com.google.common.base.Preconditions.checkNotNull;
+
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
@@ -43,7 +44,7 @@ import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.ParDo.Bound;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
@@ -59,7 +60,6 @@ import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;
import com.fasterxml.jackson.annotation.JsonCreator;
-
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Rule;
@@ -89,7 +89,9 @@ public class ParDoTest implements Serializable {
@Rule
public transient ExpectedException thrown = ExpectedException.none();
- private static class PrintingDoFn extends DoFn<String, String> implements RequiresWindowAccess {
+ private static class PrintingOldDoFn extends OldDoFn<String, String> implements
+ RequiresWindowAccess {
+
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + ":" + c.timestamp().getMillis()
@@ -97,17 +99,17 @@ public class ParDoTest implements Serializable {
}
}
- static class TestDoFn extends DoFn<Integer, String> {
+ static class TestOldDoFn extends OldDoFn<Integer, String> {
enum State { UNSTARTED, STARTED, PROCESSING, FINISHED }
State state = State.UNSTARTED;
final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
final List<TupleTag<String>> sideOutputTupleTags = new ArrayList<>();
- public TestDoFn() {
+ public TestOldDoFn() {
}
- public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
+ public TestOldDoFn(List<PCollectionView<Integer>> sideInputViews,
List<TupleTag<String>> sideOutputTupleTags) {
this.sideInputViews.addAll(sideInputViews);
this.sideOutputTupleTags.addAll(sideOutputTupleTags);
@@ -161,9 +163,9 @@ public class ParDoTest implements Serializable {
}
}
- static class TestNoOutputDoFn extends DoFn<Integer, String> {
+ static class TestNoOutputDoFn extends OldDoFn<Integer, String> {
@Override
- public void processElement(DoFn<Integer, String>.ProcessContext c) throws Exception {}
+ public void processElement(OldDoFn<Integer, String>.ProcessContext c) throws Exception {}
}
static class TestDoFnWithContext extends DoFnWithContext<Integer, String> {
@@ -229,7 +231,7 @@ public class ParDoTest implements Serializable {
}
}
- static class TestStartBatchErrorDoFn extends DoFn<Integer, String> {
+ static class TestStartBatchErrorDoFn extends OldDoFn<Integer, String> {
@Override
public void startBundle(Context c) {
throw new RuntimeException("test error in initialize");
@@ -241,14 +243,14 @@ public class ParDoTest implements Serializable {
}
}
- static class TestProcessElementErrorDoFn extends DoFn<Integer, String> {
+ static class TestProcessElementErrorDoFn extends OldDoFn<Integer, String> {
@Override
public void processElement(ProcessContext c) {
throw new RuntimeException("test error in process");
}
}
- static class TestFinishBatchErrorDoFn extends DoFn<Integer, String> {
+ static class TestFinishBatchErrorDoFn extends OldDoFn<Integer, String> {
@Override
public void processElement(ProcessContext c) {
// This has to be here.
@@ -260,13 +262,13 @@ public class ParDoTest implements Serializable {
}
}
- private static class StrangelyNamedDoer extends DoFn<Integer, String> {
+ private static class StrangelyNamedDoer extends OldDoFn<Integer, String> {
@Override
public void processElement(ProcessContext c) {
}
}
- static class TestOutputTimestampDoFn extends DoFn<Integer, Integer> {
+ static class TestOutputTimestampDoFn extends OldDoFn<Integer, Integer> {
@Override
public void processElement(ProcessContext c) {
Integer value = c.element();
@@ -274,7 +276,7 @@ public class ParDoTest implements Serializable {
}
}
- static class TestShiftTimestampDoFn extends DoFn<Integer, Integer> {
+ static class TestShiftTimestampDoFn extends OldDoFn<Integer, Integer> {
private Duration allowedTimestampSkew;
private Duration durationToShift;
@@ -297,7 +299,7 @@ public class ParDoTest implements Serializable {
}
}
- static class TestFormatTimestampDoFn extends DoFn<Integer, String> {
+ static class TestFormatTimestampDoFn extends OldDoFn<Integer, String> {
@Override
public void processElement(ProcessContext c) {
checkNotNull(c.timestamp());
@@ -318,7 +320,7 @@ public class ParDoTest implements Serializable {
return PCollectionTuple.of(BY2, by2).and(BY3, by3);
}
- static class FilterFn extends DoFn<Integer, Integer> {
+ static class FilterFn extends OldDoFn<Integer, Integer> {
private final int divisor;
FilterFn(int divisor) {
@@ -343,7 +345,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs))
- .apply(ParDo.of(new TestDoFn()));
+ .apply(ParDo.of(new TestOldDoFn()));
PAssert.that(output)
.satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -377,7 +379,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs).withCoder(VarIntCoder.of()))
- .apply("TestDoFn", ParDo.of(new TestDoFn()));
+ .apply("TestOldDoFn", ParDo.of(new TestOldDoFn()));
PAssert.that(output)
.satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -395,7 +397,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs).withCoder(VarIntCoder.of()))
- .apply("TestDoFn", ParDo.of(new TestNoOutputDoFn()));
+ .apply("TestOldDoFn", ParDo.of(new TestNoOutputDoFn()));
PAssert.that(output).empty();
@@ -418,7 +420,7 @@ public class ParDoTest implements Serializable {
PCollectionTuple outputs = pipeline
.apply(Create.of(inputs))
.apply(ParDo
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.<PCollectionView<Integer>>asList(),
Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
.withOutputTags(
@@ -461,7 +463,7 @@ public class ParDoTest implements Serializable {
PCollectionTuple outputs = pipeline
.apply(Create.of(inputs))
.apply(ParDo
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.<PCollectionView<Integer>>asList(),
Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
.withOutputTags(
@@ -527,7 +529,7 @@ public class ParDoTest implements Serializable {
PCollectionTuple outputs = pipeline
.apply(Create.of(inputs))
.apply(ParDo.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
- .of(new DoFn<Integer, Void>(){
+ .of(new OldDoFn<Integer, Void>(){
@Override
public void processElement(ProcessContext c) {
c.sideOutput(sideOutputTag, c.element());
@@ -550,7 +552,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs))
- .apply(ParDo.of(new TestDoFn(
+ .apply(ParDo.of(new TestOldDoFn(
Arrays.<PCollectionView<Integer>>asList(),
Arrays.asList(sideTag))));
@@ -569,7 +571,7 @@ public class ParDoTest implements Serializable {
// Success for a total of 1000 outputs.
input
- .apply("Success1000", ParDo.of(new DoFn<Integer, String>() {
+ .apply("Success1000", ParDo.of(new OldDoFn<Integer, String>() {
@Override
public void processElement(ProcessContext c) {
TupleTag<String> specialSideTag = new TupleTag<String>(){};
@@ -585,7 +587,7 @@ public class ParDoTest implements Serializable {
// Failure for a total of 1001 outputs.
input
- .apply("Failure1001", ParDo.of(new DoFn<Integer, String>() {
+ .apply("Failure1001", ParDo.of(new OldDoFn<Integer, String>() {
@Override
public void processElement(ProcessContext c) {
for (int i = 0; i < 1000; i++) {
@@ -618,7 +620,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.of(inputs))
.apply(ParDo.withSideInputs(sideInput1, sideInputUnread, sideInput2)
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.asList(sideInput1, sideInput2),
Arrays.<TupleTag<String>>asList())));
@@ -652,7 +654,7 @@ public class ParDoTest implements Serializable {
.apply(ParDo.withSideInputs(sideInput1)
.withSideInputs(sideInputUnread)
.withSideInputs(sideInput2)
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.asList(sideInput1, sideInput2),
Arrays.<TupleTag<String>>asList())));
@@ -690,7 +692,7 @@ public class ParDoTest implements Serializable {
.withSideInputs(sideInputUnread)
.withSideInputs(sideInput2)
.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.asList(sideInput1, sideInput2),
Arrays.<TupleTag<String>>asList())));
@@ -728,7 +730,7 @@ public class ParDoTest implements Serializable {
.withSideInputs(sideInputUnread)
.withSideInputs(sideInput2)
.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.asList(sideInput1, sideInput2),
Arrays.<TupleTag<String>>asList())));
@@ -752,7 +754,7 @@ public class ParDoTest implements Serializable {
.apply(View.<Integer>asSingleton());
pipeline.apply("CreateMain", Create.of(inputs))
- .apply(ParDo.of(new TestDoFn(
+ .apply(ParDo.of(new TestOldDoFn(
Arrays.<PCollectionView<Integer>>asList(sideView),
Arrays.<TupleTag<String>>asList())));
@@ -815,18 +817,18 @@ public class ParDoTest implements Serializable {
.setName("MyInput");
{
- PCollection<String> output1 = input.apply(ParDo.of(new TestDoFn()));
+ PCollection<String> output1 = input.apply(ParDo.of(new TestOldDoFn()));
assertEquals("ParDo(Test).out", output1.getName());
}
{
- PCollection<String> output2 = input.apply("MyParDo", ParDo.of(new TestDoFn()));
+ PCollection<String> output2 = input.apply("MyParDo", ParDo.of(new TestOldDoFn()));
assertEquals("MyParDo.out", output2.getName());
}
{
- PCollection<String> output4 = input.apply("TestDoFn", ParDo.of(new TestDoFn()));
- assertEquals("TestDoFn.out", output4.getName());
+ PCollection<String> output4 = input.apply("TestOldDoFn", ParDo.of(new TestOldDoFn()));
+ assertEquals("TestOldDoFn.out", output4.getName());
}
{
@@ -835,7 +837,7 @@ public class ParDoTest implements Serializable {
output5.getName());
}
- assertEquals("ParDo(Printing)", ParDo.of(new PrintingDoFn()).getName());
+ assertEquals("ParDo(Printing)", ParDo.of(new PrintingOldDoFn()).getName());
assertEquals(
"ParMultiDo(SideOutputDummy)",
@@ -855,7 +857,7 @@ public class ParDoTest implements Serializable {
PCollectionTuple outputs = p
.apply(Create.of(Arrays.asList(3, -42, 666))).setName("MyInput")
.apply("MyParDo", ParDo
- .of(new TestDoFn(
+ .of(new TestOldDoFn(
Arrays.<PCollectionView<Integer>>asList(),
Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
.withOutputTags(
@@ -883,7 +885,7 @@ public class ParDoTest implements Serializable {
.apply("CustomTransform", new PTransform<PCollection<Integer>, PCollection<String>>() {
@Override
public PCollection<String> apply(PCollection<Integer> input) {
- return input.apply(ParDo.of(new TestDoFn()));
+ return input.apply(ParDo.of(new TestOldDoFn()));
}
});
@@ -920,7 +922,7 @@ public class ParDoTest implements Serializable {
@Test
public void testJsonEscaping() {
// Declare an arbitrary function and make sure we can serialize it
- DoFn<Integer, Integer> doFn = new DoFn<Integer, Integer>() {
+ OldDoFn<Integer, Integer> doFn = new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + 1);
@@ -973,7 +975,7 @@ public class ParDoTest implements Serializable {
}
}
- private static class SideOutputDummyFn extends DoFn<Integer, Integer> {
+ private static class SideOutputDummyFn extends OldDoFn<Integer, Integer> {
private TupleTag<TestDummy> sideTag;
public SideOutputDummyFn(TupleTag<TestDummy> sideTag) {
this.sideTag = sideTag;
@@ -985,7 +987,7 @@ public class ParDoTest implements Serializable {
}
}
- private static class MainOutputDummyFn extends DoFn<Integer, TestDummy> {
+ private static class MainOutputDummyFn extends OldDoFn<Integer, TestDummy> {
private TupleTag<Integer> sideTag;
public MainOutputDummyFn(TupleTag<Integer> sideTag) {
this.sideTag = sideTag;
@@ -1167,7 +1169,7 @@ public class ParDoTest implements Serializable {
.apply(ParDo
.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
.of(
- new DoFn<TestDummy, TestDummy>() {
+ new OldDoFn<TestDummy, TestDummy>() {
@Override public void processElement(ProcessContext context) {
TestDummy element = context.element();
context.output(element);
@@ -1181,7 +1183,7 @@ public class ParDoTest implements Serializable {
// on a missing coder.
tuple.get(mainOutputTag)
.setCoder(TestDummyCoder.of())
- .apply("Output1", ParDo.of(new DoFn<TestDummy, Integer>() {
+ .apply("Output1", ParDo.of(new OldDoFn<TestDummy, Integer>() {
@Override public void processElement(ProcessContext context) {
context.output(1);
}
@@ -1228,7 +1230,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output =
input
.apply(ParDo.withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)).of(
- new DoFn<Integer, Integer>() {
+ new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.sideOutputWithTimestamp(
@@ -1349,7 +1351,7 @@ public class ParDoTest implements Serializable {
PCollection<String> output = pipeline
.apply(Create.timestamped(TimestampedValue.of("elem", new Instant(1))))
.apply(Window.<String>into(FixedWindows.of(Duration.millis(1))))
- .apply(ParDo.of(new DoFn<String, String>() {
+ .apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void startBundle(Context c) {
c.outputWithTimestamp("start", new Instant(2));
@@ -1368,7 +1370,7 @@ public class ParDoTest implements Serializable {
System.out.println("Finish: 3");
}
}))
- .apply(ParDo.of(new PrintingDoFn()));
+ .apply(ParDo.of(new PrintingOldDoFn()));
PAssert.that(output).satisfies(new Checker());
@@ -1383,7 +1385,7 @@ public class ParDoTest implements Serializable {
pipeline
.apply(Create.timestamped(TimestampedValue.of("elem", new Instant(1))))
.apply(Window.<String>into(FixedWindows.of(Duration.millis(1))))
- .apply(ParDo.of(new DoFn<String, String>() {
+ .apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void startBundle(Context c) {
c.output("start");
@@ -1400,7 +1402,7 @@ public class ParDoTest implements Serializable {
}
@Test
public void testDoFnDisplayData() {
- DoFn<String, String> fn = new DoFn<String, String>() {
+ OldDoFn<String, String> fn = new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
index 243b52b..0cc804e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
index fe02573..e7f8cd0 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.TestUtils.NO_LINES;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static com.google.common.base.Preconditions.checkArgument;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
index a96d19b..fc0e659 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
+
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
index 738b492..ee240bf 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.transforms;
import static com.google.common.base.Preconditions.checkArgument;
+
import static org.hamcrest.Matchers.isA;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -98,12 +99,13 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("Create123", Create.of(1, 2, 3))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- c.output(c.sideInput(view));
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ c.output(c.sideInput(view));
+ }
+ }));
PAssert.that(output).containsInAnyOrder(47, 47, 47);
@@ -124,16 +126,17 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("Create123", Create.timestamped(
- TimestampedValue.of(1, new Instant(4)),
- TimestampedValue.of(2, new Instant(8)),
- TimestampedValue.of(3, new Instant(12))))
+ TimestampedValue.of(1, new Instant(4)),
+ TimestampedValue.of(2, new Instant(8)),
+ TimestampedValue.of(3, new Instant(12))))
.apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- c.output(c.sideInput(view));
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ c.output(c.sideInput(view));
+ }
+ }));
PAssert.that(output).containsInAnyOrder(47, 47, 48);
@@ -150,7 +153,7 @@ public class ViewTest implements Serializable {
.apply(View.<Integer>asSingleton());
pipeline.apply("Create123", Create.of(1, 2, 3))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
+ .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.sideInput(view));
@@ -175,7 +178,7 @@ public class ViewTest implements Serializable {
final PCollectionView<Integer> view = oneTwoThree.apply(View.<Integer>asSingleton());
oneTwoThree.apply(
- "OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
+ "OutputSideInputs", ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.sideInput(view));
@@ -201,16 +204,17 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateMainInput", Create.of(29, 31))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- checkArgument(c.sideInput(view).size() == 4);
- checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
- for (Integer i : c.sideInput(view)) {
- c.output(i);
- }
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ checkArgument(c.sideInput(view).size() == 4);
+ checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
+ for (Integer i : c.sideInput(view)) {
+ c.output(i);
+ }
+ }
+ }));
PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 11, 13, 17, 23);
@@ -237,19 +241,21 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateMainInput", Create.timestamped(
- TimestampedValue.of(29, new Instant(1)),
- TimestampedValue.of(35, new Instant(11))))
+ TimestampedValue.of(29, new Instant(1)),
+ TimestampedValue.of(35, new Instant(11))))
.apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- checkArgument(c.sideInput(view).size() == 4);
- checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
- for (Integer i : c.sideInput(view)) {
- c.output(i);
- }
- }
- }));
+ .apply(
+ "OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ checkArgument(c.sideInput(view).size() == 4);
+ checkArgument(c.sideInput(view).get(0) == c.sideInput(view).get(0));
+ for (Integer i : c.sideInput(view)) {
+ c.output(i);
+ }
+ }
+ }));
PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 31, 33, 37, 43);
@@ -267,16 +273,17 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertTrue(c.sideInput(view).isEmpty());
- assertFalse(c.sideInput(view).iterator().hasNext());
- c.output(1);
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertTrue(c.sideInput(view).isEmpty());
+ assertFalse(c.sideInput(view).iterator().hasNext());
+ c.output(1);
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -292,36 +299,37 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateMainInput", Create.of(29))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- try {
- c.sideInput(view).clear();
- fail("Expected UnsupportedOperationException on clear()");
- } catch (UnsupportedOperationException expected) {
- }
- try {
- c.sideInput(view).add(4);
- fail("Expected UnsupportedOperationException on add()");
- } catch (UnsupportedOperationException expected) {
- }
- try {
- c.sideInput(view).addAll(new ArrayList<Integer>());
- fail("Expected UnsupportedOperationException on addAll()");
- } catch (UnsupportedOperationException expected) {
- }
- try {
- c.sideInput(view).remove(0);
- fail("Expected UnsupportedOperationException on remove()");
- } catch (UnsupportedOperationException expected) {
- }
- for (Integer i : c.sideInput(view)) {
- c.output(i);
- }
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ try {
+ c.sideInput(view).clear();
+ fail("Expected UnsupportedOperationException on clear()");
+ } catch (UnsupportedOperationException expected) {
+ }
+ try {
+ c.sideInput(view).add(4);
+ fail("Expected UnsupportedOperationException on add()");
+ } catch (UnsupportedOperationException expected) {
+ }
+ try {
+ c.sideInput(view).addAll(new ArrayList<Integer>());
+ fail("Expected UnsupportedOperationException on addAll()");
+ } catch (UnsupportedOperationException expected) {
+ }
+ try {
+ c.sideInput(view).remove(0);
+ fail("Expected UnsupportedOperationException on remove()");
+ } catch (UnsupportedOperationException expected) {
+ }
+ for (Integer i : c.sideInput(view)) {
+ c.output(i);
+ }
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(output).containsInAnyOrder(11);
pipeline.run();
@@ -338,14 +346,15 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateMainInput", Create.of(29, 31))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- for (Integer i : c.sideInput(view)) {
- c.output(i);
- }
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ for (Integer i : c.sideInput(view)) {
+ c.output(i);
+ }
+ }
+ }));
PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 11, 13, 17, 23);
@@ -371,18 +380,21 @@ public class ViewTest implements Serializable {
.apply(View.<Integer>asIterable());
PCollection<Integer> output =
- pipeline.apply("CreateMainInput", Create.timestamped(
- TimestampedValue.of(29, new Instant(1)),
- TimestampedValue.of(35, new Instant(11))))
+ pipeline
+ .apply("CreateMainInput",
+ Create.timestamped(
+ TimestampedValue.of(29, new Instant(1)),
+ TimestampedValue.of(35, new Instant(11))))
.apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- for (Integer i : c.sideInput(view)) {
- c.output(i);
- }
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ for (Integer i : c.sideInput(view)) {
+ c.output(i);
+ }
+ }
+ }));
PAssert.that(output).containsInAnyOrder(11, 13, 17, 23, 31, 33, 37, 43);
@@ -400,15 +412,16 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertFalse(c.sideInput(view).iterator().hasNext());
- c.output(1);
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertFalse(c.sideInput(view).iterator().hasNext());
+ c.output(1);
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -424,22 +437,23 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateMainInput", Create.of(29))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- Iterator<Integer> iterator = c.sideInput(view).iterator();
- while (iterator.hasNext()) {
- try {
- iterator.remove();
- fail("Expected UnsupportedOperationException on remove()");
- } catch (UnsupportedOperationException expected) {
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ Iterator<Integer> iterator = c.sideInput(view).iterator();
+ while (iterator.hasNext()) {
+ try {
+ iterator.remove();
+ fail("Expected UnsupportedOperationException on remove()");
+ } catch (UnsupportedOperationException expected) {
+ }
+ c.output(iterator.next());
+ }
}
- c.output(iterator.next());
- }
- }
- }));
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(output).containsInAnyOrder(11);
pipeline.run();
@@ -458,7 +472,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
@@ -486,7 +500,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of(2 /* size */))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<Integer, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
assertEquals((int) c.element(), c.sideInput(view).size());
@@ -540,7 +554,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
@@ -577,7 +591,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("blackberry", new Instant(16))))
.apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
for (Integer v :
@@ -615,7 +629,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of(1 /* size */, new Instant(16))))
.apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<Integer, KV<String, Integer>>() {
+ new OldDoFn<Integer, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
assertEquals((int) c.element(),
@@ -660,7 +674,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("blackberry", new Instant(16))))
.apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
for (Integer v :
@@ -689,17 +703,18 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertTrue(c.sideInput(view).isEmpty());
- assertTrue(c.sideInput(view).entrySet().isEmpty());
- assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
- c.output(c.element());
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertTrue(c.sideInput(view).isEmpty());
+ assertTrue(c.sideInput(view).entrySet().isEmpty());
+ assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+ c.output(c.element());
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -718,17 +733,18 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertTrue(c.sideInput(view).isEmpty());
- assertTrue(c.sideInput(view).entrySet().isEmpty());
- assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
- c.output(c.element());
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertTrue(c.sideInput(view).isEmpty());
+ assertTrue(c.sideInput(view).entrySet().isEmpty());
+ assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+ c.output(c.element());
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -747,7 +763,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
try {
@@ -776,7 +792,7 @@ public class ViewTest implements Serializable {
}
}));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(output).containsInAnyOrder(KV.of("apple", 1));
pipeline.run();
@@ -795,7 +811,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(
@@ -822,7 +838,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of(2 /* size */))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<Integer, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
assertEquals((int) c.element(), c.sideInput(view).size());
@@ -854,7 +870,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(
@@ -890,7 +906,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("blackberry", new Instant(16))))
.apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(
@@ -927,7 +943,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of(1 /* size */, new Instant(16))))
.apply("MainWindowInto", Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<Integer, KV<String, Integer>>() {
+ new OldDoFn<Integer, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
assertEquals((int) c.element(),
@@ -972,7 +988,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("blackberry", new Instant(16))))
.apply("MainWindowInto", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, KV<String, Integer>>() {
+ new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(KV.of(
@@ -1000,17 +1016,18 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertTrue(c.sideInput(view).isEmpty());
- assertTrue(c.sideInput(view).entrySet().isEmpty());
- assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
- c.output(c.element());
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertTrue(c.sideInput(view).isEmpty());
+ assertTrue(c.sideInput(view).entrySet().isEmpty());
+ assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+ c.output(c.element());
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -1028,17 +1045,18 @@ public class ViewTest implements Serializable {
PCollection<Integer> results =
pipeline.apply("Create1", Create.of(1))
- .apply("OutputSideInputs", ParDo.withSideInputs(view).of(new DoFn<Integer, Integer>() {
- @Override
- public void processElement(ProcessContext c) {
- assertTrue(c.sideInput(view).isEmpty());
- assertTrue(c.sideInput(view).entrySet().isEmpty());
- assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
- c.output(c.element());
- }
- }));
+ .apply("OutputSideInputs",
+ ParDo.withSideInputs(view).of(new OldDoFn<Integer, Integer>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ assertTrue(c.sideInput(view).isEmpty());
+ assertTrue(c.sideInput(view).entrySet().isEmpty());
+ assertFalse(c.sideInput(view).entrySet().iterator().hasNext());
+ c.output(c.element());
+ }
+ }));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(results).containsInAnyOrder(1);
pipeline.run();
@@ -1062,7 +1080,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(
@@ -1093,7 +1111,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateMainInput", Create.of("apple"))
.apply(
"OutputSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
@Override
public void processElement(ProcessContext c) {
try {
@@ -1121,7 +1139,7 @@ public class ViewTest implements Serializable {
}
}));
- // Pass at least one value through to guarantee that DoFn executes.
+ // Pass at least one value through to guarantee that OldDoFn executes.
PAssert.that(output).containsInAnyOrder(KV.of("apple", 1));
pipeline.run();
@@ -1139,12 +1157,14 @@ public class ViewTest implements Serializable {
PCollection<KV<String, Integer>> output =
pipeline.apply("CreateMainInput", Create.of("apple", "banana", "blackberry"))
- .apply("Output", ParDo.withSideInputs(view).of(new DoFn<String, KV<String, Integer>>() {
- @Override
- public void processElement(ProcessContext c) {
- c.output(KV.of(c.element(), c.sideInput(view).get(c.element().substring(0, 1))));
- }
- }));
+ .apply("Output",
+ ParDo.withSideInputs(view).of(new OldDoFn<String, KV<String, Integer>>() {
+ @Override
+ public void processElement(ProcessContext c) {
+ c.output(KV
+ .of(c.element(), c.sideInput(view).get(c.element().substring(0, 1))));
+ }
+ }));
PAssert.that(output).containsInAnyOrder(
KV.of("apple", 21), KV.of("banana", 3), KV.of("blackberry", 3));
@@ -1173,7 +1193,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("C", new Instant(7))))
.apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + c.sideInput(view));
@@ -1206,7 +1226,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("C", new Instant(7))))
.apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + c.sideInput(view));
@@ -1237,7 +1257,7 @@ public class ViewTest implements Serializable {
TimestampedValue.of("C", new Instant(7))))
.apply("WindowMainInput", Window.<String>into(FixedWindows.of(Duration.millis(10))))
.apply("OutputMainAndSideInputs", ParDo.withSideInputs(view).of(
- new DoFn<String, String>() {
+ new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + c.sideInput(view));
@@ -1267,7 +1287,7 @@ public class ViewTest implements Serializable {
p.apply("CreateMainInput", Create.of(""))
.apply(
"OutputMainAndSideInputs",
- ParDo.withSideInputs(view).of(new DoFn<String, String>() {
+ ParDo.withSideInputs(view).of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element() + c.sideInput(view));
@@ -1285,7 +1305,7 @@ public class ViewTest implements Serializable {
Pipeline pipeline = TestPipeline.create();
final PCollectionView<Iterable<Integer>> view1 =
pipeline.apply("CreateVoid1", Create.of((Void) null).withCoder(VoidCoder.of()))
- .apply("OutputOneInteger", ParDo.of(new DoFn<Void, Integer>() {
+ .apply("OutputOneInteger", ParDo.of(new OldDoFn<Void, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.output(17);
@@ -1297,7 +1317,7 @@ public class ViewTest implements Serializable {
pipeline.apply("CreateVoid2", Create.of((Void) null).withCoder(VoidCoder.of()))
.apply(
"OutputSideInput",
- ParDo.withSideInputs(view1).of(new DoFn<Void, Iterable<Integer>>() {
+ ParDo.withSideInputs(view1).of(new OldDoFn<Void, Iterable<Integer>>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.sideInput(view1));
@@ -1307,8 +1327,8 @@ public class ViewTest implements Serializable {
PCollection<Integer> output =
pipeline.apply("CreateVoid3", Create.of((Void) null).withCoder(VoidCoder.of()))
- .apply(
- "ReadIterableSideInput", ParDo.withSideInputs(view2).of(new DoFn<Void, Integer>() {
+ .apply("ReadIterableSideInput",
+ ParDo.withSideInputs(view2).of(new OldDoFn<Void, Integer>() {
@Override
public void processElement(ProcessContext c) {
for (Iterable<Integer> input : c.sideInput(view2)) {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
index ac67bb4..d2ba452 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java
@@ -65,9 +65,9 @@ public class WithTimestampsTest implements Serializable {
.apply(WithTimestamps.of(timestampFn));
PCollection<KV<String, Instant>> timestampedVals =
- timestamped.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+ timestamped.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
@Override
- public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+ public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
throws Exception {
c.output(KV.of(c.element(), c.timestamp()));
}
@@ -150,9 +150,9 @@ public class WithTimestampsTest implements Serializable {
WithTimestamps.of(backInTimeFn).withAllowedTimestampSkew(skew.plus(100L)));
PCollection<KV<String, Instant>> timestampedVals =
- timestampedWithSkew.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+ timestampedWithSkew.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
@Override
- public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+ public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
throws Exception {
c.output(KV.of(c.element(), c.timestamp()));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
index ce32b7d..c1848c6 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java
@@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PBegin;
@@ -50,7 +50,7 @@ public class DisplayDataEvaluatorTest implements Serializable {
new PTransform<PCollection<String>, POutput> () {
@Override
public PCollection<String> apply(PCollection<String> input) {
- return input.apply(ParDo.of(new DoFn<String, String>() {
+ return input.apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element());
@@ -79,7 +79,7 @@ public class DisplayDataEvaluatorTest implements Serializable {
@Test
public void testPrimitiveTransform() {
PTransform<? super PCollection<Integer>, ? super PCollection<Integer>> myTransform = ParDo.of(
- new DoFn<Integer, Integer>() {
+ new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) throws Exception {}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
index 07029e9..fa44390 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java
@@ -22,6 +22,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasName
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
[15/19] incubator-beam git commit: Port WindowedWordCount example
from OldDoFn to DoFn
Posted by dh...@apache.org.
Port WindowedWordCount example from OldDoFn to DoFn
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/ca9e3372
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/ca9e3372
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/ca9e3372
Branch: refs/heads/master
Commit: ca9e337203208c7c5876f0710fb3a45430a5b3a8
Parents: 4ceec0e
Author: Kenneth Knowles <kl...@google.com>
Authored: Fri Jul 22 14:29:01 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Wed Aug 3 18:25:53 2016 -0700
----------------------------------------------------------------------
.../org/apache/beam/examples/WindowedWordCount.java | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/ca9e3372/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
index 17f7da3..842cb54 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -103,14 +103,14 @@ public class WindowedWordCount {
static final int WINDOW_SIZE = 1; // Default window duration in minutes
/**
- * Concept #2: A OldDoFn that sets the data element timestamp. This is a silly method, just for
+ * Concept #2: A DoFn that sets the data element timestamp. This is a silly method, just for
* this example, for the bounded data case.
*
* <p>Imagine that many ghosts of Shakespeare are all typing madly at the same time to recreate
* his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
* 2-hour period.
*/
- static class AddTimestampFn extends OldDoFn<String, String> {
+ static class AddTimestampFn extends DoFn<String, String> {
private static final Duration RAND_RANGE = Duration.standardHours(2);
private final Instant minTimestamp;
@@ -118,7 +118,7 @@ public class WindowedWordCount {
this.minTimestamp = new Instant(System.currentTimeMillis());
}
- @Override
+ @ProcessElement
public void processElement(ProcessContext c) {
// Generate a timestamp that falls somewhere in the past two hours.
long randMillis = (long) (Math.random() * RAND_RANGE.getMillis());
@@ -130,9 +130,9 @@ public class WindowedWordCount {
}
}
- /** A OldDoFn that converts a Word and Count into a BigQuery table row. */
- static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
- @Override
+ /** A DoFn that converts a Word and Count into a BigQuery table row. */
+ static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+ @ProcessElement
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
.set("word", c.element().getKey())
[05/19] incubator-beam git commit: Rename DoFn to OldDoFn
Posted by dh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
index cafe873..517f968 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java
@@ -24,6 +24,7 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasName
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom;
+
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.everyItem;
@@ -40,7 +41,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
@@ -54,7 +55,6 @@ import com.google.common.testing.EqualsTester;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
-
import org.hamcrest.CustomTypeSafeMatcher;
import org.hamcrest.FeatureMatcher;
import org.hamcrest.Matcher;
@@ -1053,7 +1053,7 @@ public class DisplayDataTest implements Serializable {
private static class IdentityTransform<T> extends PTransform<PCollection<T>, PCollection<T>> {
@Override
public PCollection<T> apply(PCollection<T> input) {
- return input.apply(ParDo.of(new DoFn<T, T>() {
+ return input.apply(ParDo.of(new OldDoFn<T, T>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
index 10a2a7e..97667a3 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java
@@ -29,9 +29,9 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.DoFnTester;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -85,8 +85,8 @@ public class CoGroupByKeyTest implements Serializable {
.withCoder(KvCoder.of(BigEndianIntegerCoder.of(), StringUtf8Coder.of())));
}
return input
- .apply("Identity" + name, ParDo.of(new DoFn<KV<Integer, String>,
- KV<Integer, String>>() {
+ .apply("Identity" + name, ParDo.of(new OldDoFn<KV<Integer, String>,
+ KV<Integer, String>>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element());
@@ -313,11 +313,11 @@ public class CoGroupByKeyTest implements Serializable {
}
/**
- * A DoFn used in testCoGroupByKeyWithWindowing(), to test processing the
+ * A OldDoFn used in testCoGroupByKeyWithWindowing(), to test processing the
* results of a CoGroupByKey.
*/
private static class ClickOfPurchaseFn extends
- DoFn<KV<Integer, CoGbkResult>, KV<String, String>> implements RequiresWindowAccess {
+ OldDoFn<KV<Integer, CoGbkResult>, KV<String, String>> implements RequiresWindowAccess {
private final TupleTag<String> clicksTag;
private final TupleTag<String> purchasesTag;
@@ -347,11 +347,11 @@ public class CoGroupByKeyTest implements Serializable {
/**
- * A DoFn used in testCoGroupByKeyHandleResults(), to test processing the
+ * A OldDoFn used in testCoGroupByKeyHandleResults(), to test processing the
* results of a CoGroupByKey.
*/
private static class CorrelatePurchaseCountForAddressesWithoutNamesFn extends
- DoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
+ OldDoFn<KV<Integer, CoGbkResult>, KV<String, Integer>> {
private final TupleTag<String> purchasesTag;
private final TupleTag<String> addressesTag;
@@ -401,7 +401,7 @@ public class CoGroupByKeyTest implements Serializable {
}
/**
- * Tests that the consuming DoFn
+ * Tests that the consuming OldDoFn
* (CorrelatePurchaseCountForAddressesWithoutNamesFn) performs as expected.
*/
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
index fb2b4d5..ed64f84 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertThat;
import org.apache.beam.sdk.util.TriggerTester;
import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester;
import org.apache.beam.sdk.values.TimestampedValue;
+
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Before;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
index 76bc038..27d2539 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java
@@ -36,8 +36,8 @@ import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.util.WindowingStrategy;
@@ -199,7 +199,7 @@ public class WindowTest implements Serializable {
.apply(GroupByKey.<Integer, String>create())
.apply(
ParDo.of(
- new DoFn<KV<Integer, Iterable<String>>, Void>() {
+ new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
@Override
public void processElement(ProcessContext c) throws Exception {
assertThat(
@@ -231,7 +231,7 @@ public class WindowTest implements Serializable {
.apply(Window.<KV<Integer, String>>into(FixedWindows.of(Duration.standardMinutes(10)))
.withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow()))
.apply(GroupByKey.<Integer, String>create())
- .apply(ParDo.of(new DoFn<KV<Integer, Iterable<String>>, Void>() {
+ .apply(ParDo.of(new OldDoFn<KV<Integer, Iterable<String>>, Void>() {
@Override
public void processElement(ProcessContext c) throws Exception {
assertThat(c.timestamp(), equalTo(new Instant(10 * 60 * 1000 - 1)));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
index c1e092a..622a277 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java
@@ -26,9 +26,9 @@ import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.OldDoFn;
+import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -59,7 +59,7 @@ public class WindowingTest implements Serializable {
private static class WindowedCount extends PTransform<PCollection<String>, PCollection<String>> {
private final class FormatCountsDoFn
- extends DoFn<KV<String, Long>, String> implements RequiresWindowAccess {
+ extends OldDoFn<KV<String, Long>, String> implements RequiresWindowAccess {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey() + ":" + c.element().getValue()
@@ -234,8 +234,8 @@ public class WindowingTest implements Serializable {
p.run();
}
- /** A DoFn that tokenizes lines of text into individual words. */
- static class ExtractWordsWithTimestampsFn extends DoFn<String, String> {
+ /** A OldDoFn that tokenizes lines of text into individual words. */
+ static class ExtractWordsWithTimestampsFn extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) {
String[] words = c.element().split("[^a-zA-Z0-9']+");
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
index c808b4d..ee5a2b3 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java
@@ -18,10 +18,12 @@
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.Combine;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
+
+import org.apache.beam.sdk.transforms.Combine;
+
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
index 2cbc20e..b95f235 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java
@@ -18,10 +18,12 @@
package org.apache.beam.sdk.util;
-import org.apache.beam.sdk.transforms.Combine;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
+
+import org.apache.beam.sdk.transforms.Combine;
+
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
index d9e7593..30406fc 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
@@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableList;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
index 6c5d0bd..f6bacc4 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializerTest.java
@@ -25,7 +25,6 @@ import static org.apache.beam.sdk.util.Structs.addString;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
index 7e68df9..e87bbee 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java
@@ -59,12 +59,12 @@ public class StringUtilsTest {
/**
* Inner class for simple name test.
*/
- private class EmbeddedDoFn {
+ private class EmbeddedOldDoFn {
- private class DeeperEmbeddedDoFn extends EmbeddedDoFn {}
+ private class DeeperEmbeddedOldDoFn extends EmbeddedOldDoFn {}
- private EmbeddedDoFn getEmbedded() {
- return new DeeperEmbeddedDoFn();
+ private EmbeddedOldDoFn getEmbedded() {
+ return new DeeperEmbeddedOldDoFn();
}
}
@@ -93,22 +93,22 @@ public class StringUtilsTest {
@Test
public void testSimpleName() {
assertEquals("Embedded",
- StringUtils.approximateSimpleName(EmbeddedDoFn.class));
+ StringUtils.approximateSimpleName(EmbeddedOldDoFn.class));
}
@Test
public void testAnonSimpleName() throws Exception {
thrown.expect(IllegalArgumentException.class);
- EmbeddedDoFn anon = new EmbeddedDoFn(){};
+ EmbeddedOldDoFn anon = new EmbeddedOldDoFn(){};
StringUtils.approximateSimpleName(anon.getClass());
}
@Test
public void testNestedSimpleName() {
- EmbeddedDoFn fn = new EmbeddedDoFn();
- EmbeddedDoFn inner = fn.getEmbedded();
+ EmbeddedOldDoFn fn = new EmbeddedOldDoFn();
+ EmbeddedOldDoFn inner = fn.getEmbedded();
assertEquals("DeeperEmbedded", StringUtils.approximateSimpleName(inner.getClass()));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
index b321c8f..4892bbd 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java
@@ -20,6 +20,7 @@ package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+
import static org.junit.Assert.assertTrue;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
index fb002de..79f0cb7 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/CounterTest.java
@@ -31,6 +31,7 @@ import static org.junit.Assert.assertTrue;
import org.apache.beam.sdk.util.common.Counter.CommitState;
import org.apache.beam.sdk.util.common.Counter.CounterMean;
+
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
index 9a8ab30..547c778 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.PCollection.IsBounded;
@@ -75,7 +75,7 @@ public final class PCollectionTupleTest implements Serializable {
.apply(Create.of(inputs));
PCollectionTuple outputs = mainInput.apply(ParDo
- .of(new DoFn<Integer, Integer>() {
+ .of(new OldDoFn<Integer, Integer>() {
@Override
public void processElement(ProcessContext c) {
c.sideOutput(sideOutputTag, c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
index ba5dffb..c525cf1 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.junit.Rule;
@@ -44,7 +44,7 @@ public class TypedPValueTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
- private static class IdentityDoFn extends DoFn<Integer, Integer> {
+ private static class IdentityDoFn extends OldDoFn<Integer, Integer> {
private static final long serialVersionUID = 0;
@Override
public void processElement(ProcessContext c) throws Exception {
@@ -129,7 +129,7 @@ public class TypedPValueTest {
static class EmptyClass {
}
- private static class EmptyClassDoFn extends DoFn<Integer, EmptyClass> {
+ private static class EmptyClassDoFn extends OldDoFn<Integer, EmptyClass> {
private static final long serialVersionUID = 0;
@Override
public void processElement(ProcessContext c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
index 72abaea..88836f9 100644
--- a/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
+++ b/sdks/java/extensions/join-library/src/main/java/org/apache/beam/sdk/extensions/joinlibrary/Join.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.extensions.joinlibrary;
import static com.google.common.base.Preconditions.checkNotNull;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.join.CoGbkResult;
import org.apache.beam.sdk.transforms.join.CoGroupByKey;
@@ -59,7 +59,7 @@ public class Join {
.apply(CoGroupByKey.<K>create());
return coGbkResultCollection.apply(ParDo.of(
- new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+ new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
@Override
public void processElement(ProcessContext c) {
KV<K, CoGbkResult> e = c.element();
@@ -108,7 +108,7 @@ public class Join {
.apply(CoGroupByKey.<K>create());
return coGbkResultCollection.apply(ParDo.of(
- new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+ new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
@Override
public void processElement(ProcessContext c) {
KV<K, CoGbkResult> e = c.element();
@@ -161,7 +161,7 @@ public class Join {
.apply(CoGroupByKey.<K>create());
return coGbkResultCollection.apply(ParDo.of(
- new DoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
+ new OldDoFn<KV<K, CoGbkResult>, KV<K, KV<V1, V2>>>() {
@Override
public void processElement(ProcessContext c) {
KV<K, CoGbkResult> e = c.element();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 76f7079..9fccbf9 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -44,7 +44,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.PipelineRunner;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -324,7 +324,7 @@ public class BigQueryIO {
* <p>Each {@link TableRow} contains values indexed by column name. Here is a
* sample processing function that processes a "line" column from rows:
* <pre>{@code
- * static class ExtractWordsFn extends DoFn<TableRow, String> {
+ * static class ExtractWordsFn extends OldDoFn<TableRow, String> {
* public void processElement(ProcessContext c) {
* // Get the "line" field of the TableRow object, split it into words, and emit them.
* TableRow row = c.element();
@@ -696,7 +696,7 @@ public class BigQueryIO {
input.getPipeline()
.apply("Create(CleanupOperation)", Create.of(cleanupOperation))
.apply("Cleanup", ParDo.of(
- new DoFn<CleanupOperation, Void>() {
+ new OldDoFn<CleanupOperation, Void>() {
@Override
public void processElement(ProcessContext c)
throws Exception {
@@ -707,7 +707,7 @@ public class BigQueryIO {
return outputs.get(mainOutput);
}
- private static class IdentityFn<T> extends DoFn<T, T> {
+ private static class IdentityFn<T> extends OldDoFn<T, T> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element());
@@ -1262,7 +1262,7 @@ public class BigQueryIO {
* <p>Here is a sample transform that produces TableRow values containing
* "word" and "count" columns:
* <pre>{@code
- * static class FormatCountsFn extends DoFn<KV<String, Long>, TableRow> {
+ * static class FormatCountsFn extends OldDoFn<KV<String, Long>, TableRow> {
* public void processElement(ProcessContext c) {
* TableRow row = new TableRow()
* .set("word", c.element().getKey())
@@ -2011,11 +2011,11 @@ public class BigQueryIO {
/////////////////////////////////////////////////////////////////////////////
/**
- * Implementation of DoFn to perform streaming BigQuery write.
+ * Implementation of OldDoFn to perform streaming BigQuery write.
*/
@SystemDoFnInternal
private static class StreamingWriteFn
- extends DoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
+ extends OldDoFn<KV<ShardedKey<String>, TableRowInfo>, Void> {
/** TableSchema in JSON. Use String to make the class Serializable. */
private final String jsonTableSchema;
@@ -2248,8 +2248,8 @@ public class BigQueryIO {
* id is created by concatenating this randomUUID with a sequential number.
*/
private static class TagWithUniqueIdsAndTable
- extends DoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>>
- implements DoFn.RequiresWindowAccess {
+ extends OldDoFn<TableRow, KV<ShardedKey<String>, TableRowInfo>>
+ implements OldDoFn.RequiresWindowAccess {
/** TableSpec to write to. */
private final String tableSpec;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
index f4082d4..1f77e3e 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
@@ -31,7 +31,7 @@ import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.apache.beam.sdk.io.range.ByteKeyRangeTracker;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.PipelineRunner;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -512,7 +512,7 @@ public class BigtableIO {
return new BigtableServiceImpl(options);
}
- private class BigtableWriterFn extends DoFn<KV<ByteString, Iterable<Mutation>>, Void> {
+ private class BigtableWriterFn extends OldDoFn<KV<ByteString, Iterable<Mutation>>, Void> {
public BigtableWriterFn(String tableId, BigtableService bigtableService) {
this.tableId = checkNotNull(tableId, "tableId");
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
index bda907a..6f3663a 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3.java
@@ -37,9 +37,9 @@ import org.apache.beam.sdk.io.Sink.Writer;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Values;
@@ -85,7 +85,6 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
-
import javax.annotation.Nullable;
/**
@@ -479,11 +478,11 @@ public class V1Beta3 {
}
/**
- * A {@link DoFn} that splits a given query into multiple sub-queries, assigns them unique keys
- * and outputs them as {@link KV}.
+ * An {@link OldDoFn} that splits a given query into multiple sub-queries, assigns them unique
+ * keys and outputs them as {@link KV}.
*/
@VisibleForTesting
- static class SplitQueryFn extends DoFn<Query, KV<Integer, Query>> {
+ static class SplitQueryFn extends OldDoFn<Query, KV<Integer, Query>> {
private final V1Beta3Options options;
// number of splits to make for a given query
private final int numSplits;
@@ -560,10 +559,10 @@ public class V1Beta3 {
}
/**
- * A {@link DoFn} that reads entities from Datastore for each query.
+ * An {@link OldDoFn} that reads entities from Datastore for each query.
*/
@VisibleForTesting
- static class ReadFn extends DoFn<Query, Entity> {
+ static class ReadFn extends OldDoFn<Query, Entity> {
private final V1Beta3Options options;
private final V1Beta3DatastoreFactory datastoreFactory;
// Datastore client
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
index 00e7891..7d2df62 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
@@ -58,7 +58,7 @@ import org.apache.beam.sdk.testing.SourceTestUtils;
import org.apache.beam.sdk.testing.SourceTestUtils.ExpectedSplitOutcome;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -216,7 +216,7 @@ public class BigQueryIOTest implements Serializable {
private Object[] pollJobReturns;
private String executingProject;
// Both counts will be reset back to zeros after serialization.
- // This is a work around for DoFn's verifyUnmodified check.
+ // This is a workaround for OldDoFn's verifyUnmodified check.
private transient int startJobCallsCount;
private transient int pollJobStatusCallsCount;
@@ -546,7 +546,7 @@ public class BigQueryIOTest implements Serializable {
.apply(BigQueryIO.Read.from("non-executing-project:somedataset.sometable")
.withTestServices(fakeBqServices)
.withoutValidation())
- .apply(ParDo.of(new DoFn<TableRow, String>() {
+ .apply(ParDo.of(new OldDoFn<TableRow, String>() {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output((String) c.element().get("name"));
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
index a39d7d5..83489a5 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.CountingInput;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -108,7 +108,7 @@ public class BigtableWriteIT implements Serializable {
Pipeline p = Pipeline.create(options);
p.apply(CountingInput.upTo(numRows))
- .apply(ParDo.of(new DoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
+ .apply(ParDo.of(new OldDoFn<Long, KV<ByteString, Iterable<Mutation>>>() {
@Override
public void processElement(ProcessContext c) {
int index = c.element().intValue();
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
index 59d91d4..daed1cb 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1Beta3TestUtil.java
@@ -27,7 +27,7 @@ import static com.google.datastore.v1beta3.client.DatastoreHelper.makeValue;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff;
import org.apache.beam.sdk.util.RetryHttpRequestInitializer;
@@ -109,9 +109,9 @@ class V1Beta3TestUtil {
}
/**
- * A DoFn that creates entity for a long number.
+ * An OldDoFn that creates an entity for a long number.
*/
- static class CreateEntityFn extends DoFn<Long, Entity> {
+ static class CreateEntityFn extends OldDoFn<Long, Entity> {
private final String kind;
@Nullable
private final String namespace;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index 2de933c..342c4fc 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.display.DisplayData;
@@ -453,7 +453,7 @@ public class JmsIO {
checkArgument((queue != null || topic != null), "Either queue or topic is required");
}
- private static class JmsWriter extends DoFn<String, Void> {
+ private static class JmsWriter extends OldDoFn<String, Void> {
private ConnectionFactory connectionFactory;
private String queue;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 3b64bd5..eb649a6 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -33,7 +33,7 @@ import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark;
import org.apache.beam.sdk.io.UnboundedSource.UnboundedReader;
import org.apache.beam.sdk.io.kafka.KafkaCheckpointMark.PartitionMark;
import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -550,7 +550,7 @@ public class KafkaIO {
return typedRead
.apply(begin)
.apply("Remove Kafka Metadata",
- ParDo.of(new DoFn<KafkaRecord<K, V>, KV<K, V>>() {
+ ParDo.of(new OldDoFn<KafkaRecord<K, V>, KV<K, V>>() {
@Override
public void processElement(ProcessContext ctx) {
ctx.output(ctx.element().getKV());
@@ -1315,7 +1315,7 @@ public class KafkaIO {
public PDone apply(PCollection<V> input) {
return input
.apply("Kafka values with default key",
- ParDo.of(new DoFn<V, KV<Void, V>>() {
+ ParDo.of(new OldDoFn<V, KV<Void, V>>() {
@Override
public void processElement(ProcessContext ctx) throws Exception {
ctx.output(KV.<Void, V>of(null, ctx.element()));
@@ -1326,7 +1326,7 @@ public class KafkaIO {
}
}
- private static class KafkaWriter<K, V> extends DoFn<KV<K, V>, Void> {
+ private static class KafkaWriter<K, V> extends OldDoFn<KV<K, V>, Void> {
@Override
public void startBundle(Context c) throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
index dd93823..d7b1921 100644
--- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
+++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java
@@ -33,10 +33,10 @@ import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.Max;
import org.apache.beam.sdk.transforms.Min;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.RemoveDuplicates;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -78,7 +78,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-
import javax.annotation.Nullable;
/**
@@ -281,7 +280,7 @@ public class KafkaIOTest {
p.run();
}
- private static class ElementValueDiff extends DoFn<Long, Long> {
+ private static class ElementValueDiff extends OldDoFn<Long, Long> {
@Override
public void processElement(ProcessContext c) throws Exception {
c.output(c.element() - c.timestamp().getMillis());
@@ -309,7 +308,7 @@ public class KafkaIOTest {
p.run();
}
- private static class RemoveKafkaMetadata<K, V> extends DoFn<KafkaRecord<K, V>, KV<K, V>> {
+ private static class RemoveKafkaMetadata<K, V> extends OldDoFn<KafkaRecord<K, V>, KV<K, V>> {
@Override
public void processElement(ProcessContext ctx) throws Exception {
ctx.output(ctx.element().getKV());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
----------------------------------------------------------------------
diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
index fef8d40..1141e88 100644
--- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
+++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java
@@ -47,9 +47,9 @@ public class WithTimestampsJava8Test implements Serializable {
.apply(WithTimestamps.of((String input) -> new Instant(Long.valueOf(yearTwoThousand))));
PCollection<KV<String, Instant>> timestampedVals =
- timestamped.apply(ParDo.of(new DoFn<String, KV<String, Instant>>() {
+ timestamped.apply(ParDo.of(new OldDoFn<String, KV<String, Instant>>() {
@Override
- public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
+ public void processElement(OldDoFn<String, KV<String, Instant>>.ProcessContext c)
throws Exception {
c.output(KV.of(c.element(), c.timestamp()));
}
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
index c0e5b17..bc55c06 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java
@@ -25,7 +25,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.values.KV;
@@ -108,7 +108,7 @@ import java.util.regex.Pattern;
*/
public class DebuggingWordCount {
/** A DoFn that filters for a specific key based upon a regular expression. */
- public static class FilterTextFn extends DoFn<KV<String, Long>, KV<String, Long>> {
+ public static class FilterTextFn extends OldDoFn<KV<String, Long>, KV<String, Long>> {
/**
* Concept #1: The logger below uses the fully qualified class name of FilterTextFn
* as the logger. All log statements emitted by this logger will be referenced by this name
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
index be32afa..55beb1f 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/MinimalWordCount.java
@@ -23,7 +23,7 @@ import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.KV;
@@ -82,7 +82,7 @@ public class MinimalWordCount {
// DoFn (defined in-line) on each element that tokenizes the text line into individual words.
// The ParDo returns a PCollection<String>, where each element is an individual word in
// Shakespeare's collected texts.
- .apply("ExtractWords", ParDo.of(new DoFn<String, String>() {
+ .apply("ExtractWords", ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
for (String word : c.element().split("[^a-zA-Z']+")) {
@@ -98,7 +98,7 @@ public class MinimalWordCount {
.apply(Count.<String>perElement())
// Apply another ParDo transform that formats our PCollection of word counts into a printable
// string, suitable for writing to an output file.
- .apply("FormatResults", ParDo.of(new DoFn<KV<String, Long>, String>() {
+ .apply("FormatResults", ParDo.of(new OldDoFn<KV<String, Long>, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey() + ": " + c.element().getValue());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
index c2defa7..ffe8b88 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
@@ -121,7 +121,7 @@ public class WindowedWordCount {
* his masterworks. Each line of the corpus will get a random associated timestamp somewhere in a
* 2-hour period.
*/
- static class AddTimestampFn extends DoFn<String, String> {
+ static class AddTimestampFn extends OldDoFn<String, String> {
private static final long RAND_RANGE = 7200000; // 2 hours in ms
@Override
@@ -137,7 +137,7 @@ public class WindowedWordCount {
}
/** A DoFn that converts a Word and Count into a BigQuery table row. */
- static class FormatAsTableRowFn extends DoFn<KV<String, Long>, TableRow> {
+ static class FormatAsTableRowFn extends OldDoFn<KV<String, Long>, TableRow> {
@Override
public void processElement(ProcessContext c) {
TableRow row = new TableRow()
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
index 803e800..5432036 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WordCount.java
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Sum;
@@ -95,7 +95,7 @@ public class WordCount {
* of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
* pipeline.
*/
- static class ExtractWordsFn extends DoFn<String, String> {
+ static class ExtractWordsFn extends OldDoFn<String, String> {
private final Aggregator<Long, Long> emptyLines =
createAggregator("emptyLines", new Sum.SumLongFn());
@@ -118,7 +118,7 @@ public class WordCount {
}
/** A DoFn that converts a Word and Count into a printable string. */
- public static class FormatAsTextFn extends DoFn<KV<String, Long>, String> {
+ public static class FormatAsTextFn extends OldDoFn<KV<String, Long>, String> {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().getKey() + ": " + c.element().getValue());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
index 5c182b2..9b347da 100644
--- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
+++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.IntraBundleParallelization;
import org.apache.beam.sdk.util.Transport;
@@ -72,7 +72,7 @@ public class PubsubFileInjector {
}
/** A DoFn that publishes non-empty lines to Google Cloud PubSub. */
- public static class Bound extends DoFn<String, Void> {
+ public static class Bound extends OldDoFn<String, Void> {
private final String outputTopic;
private final String timestampLabelKey;
public transient Pubsub pubsub;
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
index 9a75bb7..6a1c41b 100644
--- a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
+++ b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java
@@ -20,7 +20,7 @@ package ${package};
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.slf4j.Logger;
@@ -50,13 +50,13 @@ public class StarterPipeline {
PipelineOptionsFactory.fromArgs(args).withValidation().create());
p.apply(Create.of("Hello", "World"))
- .apply(ParDo.of(new DoFn<String, String>() {
+ .apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().toUpperCase());
}
}))
- .apply(ParDo.of(new DoFn<String, Void>() {
+ .apply(ParDo.of(new OldDoFn<String, Void>() {
@Override
public void processElement(ProcessContext c) {
LOG.info(c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
index 8c71d9d..7c13350 100644
--- a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
+++ b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java
@@ -20,7 +20,7 @@ package it.pkg;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.slf4j.Logger;
@@ -50,13 +50,13 @@ public class StarterPipeline {
PipelineOptionsFactory.fromArgs(args).withValidation().create());
p.apply(Create.of("Hello", "World"))
- .apply(ParDo.of(new DoFn<String, String>() {
+ .apply(ParDo.of(new OldDoFn<String, String>() {
@Override
public void processElement(ProcessContext c) {
c.output(c.element().toUpperCase());
}
}))
- .apply(ParDo.of(new DoFn<String, Void>() {
+ .apply(ParDo.of(new OldDoFn<String, Void>() {
@Override
public void processElement(ProcessContext c) {
LOG.info(c.element());
http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/a64baf48/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
----------------------------------------------------------------------
diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
index f1dfbb9..0da75f4 100644
--- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
+++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/transforms/DoFnReflectorBenchmark.java
@@ -20,11 +20,11 @@ package org.apache.beam.sdk.microbenchmarks.transforms;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.DoFnReflector;
import org.apache.beam.sdk.transforms.DoFnReflector.DoFnInvoker;
import org.apache.beam.sdk.transforms.DoFnWithContext;
import org.apache.beam.sdk.transforms.DoFnWithContext.ExtraContextFactory;
+import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowingInternals;
@@ -40,7 +40,7 @@ import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
/**
- * Benchmarks for {@link DoFn} and {@link DoFnWithContext} invocations, specifically
+ * Benchmarks for {@link OldDoFn} and {@link DoFnWithContext} invocations, specifically
* for measuring the overhead of {@link DoFnReflector}.
*/
@State(Scope.Benchmark)
@@ -50,7 +50,7 @@ public class DoFnReflectorBenchmark {
private static final String ELEMENT = "some string to use for testing";
- private DoFn<String, String> doFn = new UpperCaseDoFn();
+ private OldDoFn<String, String> doFn = new UpperCaseDoFn();
private DoFnWithContext<String, String> doFnWithContext = new UpperCaseDoFnWithContext();
private StubDoFnProcessContext stubDoFnContext = new StubDoFnProcessContext(doFn, ELEMENT);
@@ -71,7 +71,7 @@ public class DoFnReflectorBenchmark {
};
private DoFnReflector doFnReflector;
- private DoFn<String, String> adaptedDoFnWithContext;
+ private OldDoFn<String, String> adaptedDoFnWithContext;
private DoFnInvoker<String, String> invoker;
@@ -100,7 +100,7 @@ public class DoFnReflectorBenchmark {
return stubDoFnWithContextContext.output;
}
- private static class UpperCaseDoFn extends DoFn<String, String> {
+ private static class UpperCaseDoFn extends OldDoFn<String, String> {
@Override
public void processElement(ProcessContext c) throws Exception {
@@ -116,12 +116,12 @@ public class DoFnReflectorBenchmark {
}
}
- private static class StubDoFnProcessContext extends DoFn<String, String>.ProcessContext {
+ private static class StubDoFnProcessContext extends OldDoFn<String, String>.ProcessContext {
private final String element;
private String output;
- public StubDoFnProcessContext(DoFn<String, String> fn, String element) {
+ public StubDoFnProcessContext(OldDoFn<String, String> fn, String element) {
fn.super();
this.element = element;
}