Posted to commits@beam.apache.org by ke...@apache.org on 2017/04/25 17:29:56 UTC

[01/50] [abbrv] beam git commit: [BEAM-1964] Fix lint issues for linter upgrade -2

Repository: beam
Updated Branches:
  refs/heads/jstorm-runner f6a89b0fc -> 58d4b97c0


[BEAM-1964] Fix lint issues for linter upgrade -2


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/bf474a0b
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/bf474a0b
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/bf474a0b

Branch: refs/heads/jstorm-runner
Commit: bf474a0b72beb2e946be39ce04e3f07800a3b307
Parents: cf9ac45
Author: Sourabh Bajaj <so...@google.com>
Authored: Thu Apr 13 17:19:56 2017 -0700
Committer: Ahmet Altay <al...@altay-macbookpro2.roam.corp.google.com>
Committed: Fri Apr 14 13:06:14 2017 -0700

----------------------------------------------------------------------
 .../io/gcp/datastore/v1/datastoreio.py          |  4 +--
 .../apache_beam/io/gcp/datastore/v1/helper.py   | 16 ++++-------
 .../io/gcp/datastore/v1/query_splitter.py       |  2 +-
 sdks/python/apache_beam/io/gcp/gcsfilesystem.py |  3 +-
 .../io/gcp/tests/bigquery_matcher.py            |  3 +-
 sdks/python/apache_beam/metrics/cells.py        | 28 +++++++++----------
 sdks/python/apache_beam/metrics/execution.py    |  3 +-
 sdks/python/apache_beam/metrics/metric.py       |  9 ++----
 sdks/python/apache_beam/runners/common.py       |  9 ++----
 .../runners/dataflow/dataflow_metrics_test.py   |  3 +-
 .../runners/dataflow/dataflow_runner.py         |  6 ++--
 .../runners/dataflow/internal/apiclient.py      |  8 +++---
 .../runners/dataflow/internal/dependency.py     |  6 ++--
 .../runners/dataflow/test_dataflow_runner.py    |  4 ---
 .../runners/direct/bundle_factory.py            | 14 ++++------
 .../runners/direct/evaluation_context.py        | 10 +++----
 .../apache_beam/runners/direct/executor.py      |  9 +-----
 .../runners/direct/transform_evaluator.py       |  7 -----
 sdks/python/apache_beam/runners/runner.py       |  3 +-
 .../apache_beam/tests/pipeline_verifiers.py     |  7 ++---
 sdks/python/apache_beam/transforms/combiners.py | 29 +++++++-------------
 .../apache_beam/transforms/combiners_test.py    |  4 +--
 sdks/python/apache_beam/typehints/decorators.py |  3 +-
 sdks/python/apache_beam/typehints/typehints.py  |  3 +-
 24 files changed, 68 insertions(+), 125 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py b/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py
index e8ca05d..d9b3598 100644
--- a/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py
+++ b/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py
@@ -253,7 +253,7 @@ class ReadFromDatastore(PTransform):
     query = helper.make_latest_timestamp_query(namespace)
     req = helper.make_request(project, namespace, query)
     resp = datastore.run_query(req)
-    if len(resp.batch.entity_results) == 0:
+    if not resp.batch.entity_results:
       raise RuntimeError("Datastore total statistics unavailable.")
 
     entity = resp.batch.entity_results[0].entity
@@ -281,7 +281,7 @@ class ReadFromDatastore(PTransform):
 
     req = helper.make_request(project, namespace, kind_stats_query)
     resp = datastore.run_query(req)
-    if len(resp.batch.entity_results) == 0:
+    if not resp.batch.entity_results:
       raise RuntimeError("Datastore statistics for kind %s unavailable" % kind)
 
     entity = resp.batch.entity_results[0].entity
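
Both hunks above are pylint's len-as-condition fix: emptiness of a sequence (including a protobuf repeated field like entity_results) is tested by truthiness rather than by comparing len() to zero. A minimal sketch of the pattern, using a hypothetical first_entity helper:

    def first_entity(results):
        # Empty sequences are falsy, so `not results` replaces
        # `len(results) == 0`.
        if not results:
            raise RuntimeError('no results available')
        return results[0]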

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/io/gcp/datastore/v1/helper.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1/helper.py b/sdks/python/apache_beam/io/gcp/datastore/v1/helper.py
index b1ef9af..d544226 100644
--- a/sdks/python/apache_beam/io/gcp/datastore/v1/helper.py
+++ b/sdks/python/apache_beam/io/gcp/datastore/v1/helper.py
@@ -62,8 +62,7 @@ def key_comparator(k1, k2):
   k2_path = next(k2_iter, None)
   if k2_path:
     return -1
-  else:
-    return 0
+  return 0
 
 
 def compare_path(p1, p2):
@@ -99,8 +98,7 @@ def str_compare(s1, s2):
     return 0
   elif s1 < s2:
     return -1
-  else:
-    return 1
+  return 1
 
 
 def get_datastore(project):
@@ -131,13 +129,9 @@ def make_partition(project, namespace):
 def retry_on_rpc_error(exception):
   """A retry filter for Cloud Datastore RPCErrors."""
   if isinstance(exception, RPCError):
-    if exception.code >= 500:
-      return True
-    else:
-      return False
-  else:
-    # TODO(vikasrk): Figure out what other errors should be retried.
-    return False
+    return exception.code >= 500
+  # TODO(vikasrk): Figure out what other errors should be retried.
+  return False
 
 
 def fetch_entities(project, namespace, query, datastore):
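
The helper.py hunks all clear the same pylint warning, no-else-return: when the if branch returns, the trailing else is redundant and its body can be dedented. A small illustration, modeled on the three-way comparators above:

    def three_way_compare(a, b):
        if a == b:
            return 0
        if a < b:
            return -1
        # No `else` needed: this line is only reached when a > b.
        return 1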

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/io/gcp/datastore/v1/query_splitter.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1/query_splitter.py b/sdks/python/apache_beam/io/gcp/datastore/v1/query_splitter.py
index 8ced170..d5674f9 100644
--- a/sdks/python/apache_beam/io/gcp/datastore/v1/query_splitter.py
+++ b/sdks/python/apache_beam/io/gcp/datastore/v1/query_splitter.py
@@ -97,7 +97,7 @@ def _validate_query(query):
   if len(query.kind) != 1:
     raise ValueError('Query must have exactly one kind.')
 
-  if len(query.order) != 0:
+  if query.order:
     raise ValueError('Query cannot have any sort orders.')
 
   if query.HasField('limit'):

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
index b2bc809..a10a3d2 100644
--- a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
+++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
@@ -93,8 +93,7 @@ class GCSFileSystem(FileSystem):
     raw_file = gcsio.GcsIO().open(path, mode, mime_type=mime_type)
     if compression_type == CompressionTypes.UNCOMPRESSED:
       return raw_file
-    else:
-      return CompressedFile(raw_file, compression_type=compression_type)
+    return CompressedFile(raw_file, compression_type=compression_type)
 
   def create(self, path, mime_type='application/octet-stream',
              compression_type=CompressionTypes.AUTO):

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py b/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py
index cc26689..66d99b3 100644
--- a/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py
+++ b/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py
@@ -38,8 +38,7 @@ MAX_RETRIES = 4
 
 def retry_on_http_and_value_error(exception):
   """Filter allowing retries on Bigquery errors and value error."""
-  return isinstance(exception, GoogleCloudError) or \
-          isinstance(exception, ValueError)
+  return isinstance(exception, (GoogleCloudError, ValueError))
 
 
 class BigqueryMatcher(BaseMatcher):
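
This one is pylint's consider-merging-isinstance fix: isinstance accepts a tuple of types, so two calls joined with `or` (and the line-continuation backslash) collapse into one. For example, with a placeholder exception class:

    class GoogleCloudError(Exception):
        """Placeholder for the real google.cloud exception class."""

    def retry_on_http_and_value_error(exception):
        # One isinstance call with a tuple of types replaces two calls
        # joined by `or`.
        return isinstance(exception, (GoogleCloudError, ValueError))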

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/metrics/cells.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/metrics/cells.py b/sdks/python/apache_beam/metrics/cells.py
index 5a571f5..c421949 100644
--- a/sdks/python/apache_beam/metrics/cells.py
+++ b/sdks/python/apache_beam/metrics/cells.py
@@ -97,9 +97,8 @@ class CellCommitState(object):
     with self._lock:
       if self._state == CellCommitState.CLEAN:
         return False
-      else:
-        self._state = CellCommitState.COMMITTING
-        return True
+      self._state = CellCommitState.COMMITTING
+      return True
 
 
 class MetricCell(object):
@@ -218,8 +217,7 @@ class DistributionResult(object):
     """
     if self.data.count == 0:
       return None
-    else:
-      return float(self.data.sum)/self.data.count
+    return float(self.data.sum)/self.data.count
 
 
 class DistributionData(object):
@@ -257,16 +255,16 @@ class DistributionData(object):
   def combine(self, other):
     if other is None:
       return self
-    else:
-      new_min = (None if self.min is None and other.min is None else
-                 min(x for x in (self.min, other.min) if x is not None))
-      new_max = (None if self.max is None and other.max is None else
-                 max(x for x in (self.max, other.max) if x is not None))
-      return DistributionData(
-          self.sum + other.sum,
-          self.count + other.count,
-          new_min,
-          new_max)
+
+    new_min = (None if self.min is None and other.min is None else
+               min(x for x in (self.min, other.min) if x is not None))
+    new_max = (None if self.max is None and other.max is None else
+               max(x for x in (self.max, other.max) if x is not None))
+    return DistributionData(
+        self.sum + other.sum,
+        self.count + other.count,
+        new_min,
+        new_max)
 
   @classmethod
   def singleton(cls, value):
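
Besides dropping the else branch, combine() is worth a second look for its None-safe min/max: either side's min or max may be None (no data seen yet), so the generator expression filters Nones out before comparing. The idiom in isolation, as a hypothetical combine_min helper:

    def combine_min(a, b):
        # None means "no data yet"; compare only the values that exist.
        if a is None and b is None:
            return None
        return min(x for x in (a, b) if x is not None)

    assert combine_min(None, 3) == 3
    assert combine_min(2, 3) == 2
    assert combine_min(None, None) is None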

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/metrics/execution.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/metrics/execution.py b/sdks/python/apache_beam/metrics/execution.py
index f6c8990..887423b 100644
--- a/sdks/python/apache_beam/metrics/execution.py
+++ b/sdks/python/apache_beam/metrics/execution.py
@@ -129,8 +129,7 @@ class _MetricsEnvironment(object):
     index = len(self.PER_THREAD.container) - 1
     if index < 0:
       return None
-    else:
-      return self.PER_THREAD.container[index]
+    return self.PER_THREAD.container[index]
 
   def set_current_container(self, container):
     self.set_container_stack()

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/metrics/metric.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/metrics/metric.py b/sdks/python/apache_beam/metrics/metric.py
index f6a0923..33db4e1 100644
--- a/sdks/python/apache_beam/metrics/metric.py
+++ b/sdks/python/apache_beam/metrics/metric.py
@@ -103,8 +103,7 @@ class MetricResults(object):
         (filter.names and
          metric_key.metric.name in filter.names)):
       return True
-    else:
-      return False
+    return False
 
   @staticmethod
   def _matches_sub_path(actual_scope, filter_scope):
@@ -117,8 +116,7 @@ class MetricResults(object):
       return False  # The first entry was not exactly matched
     elif end_pos != len(actual_scope) and actual_scope[end_pos] != '/':
       return False  # The last entry was not exactly matched
-    else:
-      return True
+    return True
 
   @staticmethod
   def _matches_scope(filter, metric_key):
@@ -139,8 +137,7 @@ class MetricResults(object):
     if (MetricResults._matches_name(filter, metric_key) and
         MetricResults._matches_scope(filter, metric_key)):
       return True
-    else:
-      return False
+    return False
 
   def query(self, filter=None):
     raise NotImplementedError
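
These hunks keep the `return True` / `return False` shape and only drop the else. Pylint's related simplifiable-if-statement check would go one step further and return the condition itself; a sketch of that stronger form, with a hypothetical name filter:

    def name_matches(filter_names, name):
        # Returning the boolean expression directly subsumes
        # `if ...: return True` / `return False`.
        return bool(not filter_names or name in filter_names)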

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/common.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/common.py b/sdks/python/apache_beam/runners/common.py
index 2c1032d..8f86b75 100644
--- a/sdks/python/apache_beam/runners/common.py
+++ b/sdks/python/apache_beam/runners/common.py
@@ -414,10 +414,8 @@ def get_logging_context(maybe_logger, **kwargs):
     maybe_context = maybe_logger.PerThreadLoggingContext(**kwargs)
     if isinstance(maybe_context, LoggingContext):
       return maybe_context
-    else:
-      return _LoggingContextAdapter(maybe_context)
-  else:
-    return LoggingContext()
+    return _LoggingContextAdapter(maybe_context)
+  return LoggingContext()
 
 
 class _ReceiverAdapter(Receiver):
@@ -432,5 +430,4 @@ class _ReceiverAdapter(Receiver):
 def as_receiver(maybe_receiver):
   if isinstance(maybe_receiver, Receiver):
     return maybe_receiver
-  else:
-    return _ReceiverAdapter(maybe_receiver)
+  return _ReceiverAdapter(maybe_receiver)

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
index 95027a3..ffee3e5 100644
--- a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
+++ b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
@@ -38,8 +38,7 @@ class DictToObject(object):
   def _wrap(self, value):
     if isinstance(value, (tuple, list, set, frozenset)):
       return type(value)([self._wrap(v) for v in value])
-    else:
-      return DictToObject(value) if isinstance(value, dict) else value
+    return DictToObject(value) if isinstance(value, dict) else value
 
 
 class TestDataflowMetrics(unittest.TestCase):

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
index 1a92c26..2e9fc52 100644
--- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
+++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
@@ -92,8 +92,7 @@ class DataflowRunner(PipelineRunner):
         return -1
       elif 'Traceback' in msg:
         return 1
-      else:
-        return 0
+      return 0
 
     job_id = result.job_id()
     while True:
@@ -194,8 +193,7 @@ class DataflowRunner(PipelineRunner):
       return coders.WindowedValueCoder(
           coders.registry.get_coder(typehint),
           window_coder=window_coder)
-    else:
-      return coders.registry.get_coder(typehint)
+    return coders.registry.get_coder(typehint)
 
   def _get_cloud_encoding(self, coder):
     """Returns an encoding based on a coder object."""

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
index 6a8aa93..8d44dff 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
@@ -436,10 +436,10 @@ class DataflowApplicationClient(object):
 
     if not template_location:
       return self.submit_job_description(job)
-    else:
-      logging.info('A template was just created at location %s',
-                   template_location)
-      return None
+
+    logging.info('A template was just created at location %s',
+                 template_location)
+    return None
 
   def create_job_description(self, job):
     """Creates a job described by the workflow proto."""

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/dataflow/internal/dependency.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/dependency.py b/sdks/python/apache_beam/runners/dataflow/internal/dependency.py
index 22de5c6..1f28b26 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/dependency.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/dependency.py
@@ -493,8 +493,7 @@ def get_sdk_name_and_version():
   container_version = get_required_container_version()
   if container_version == BEAM_CONTAINER_VERSION:
     return ('Apache Beam SDK for Python', beam_version.__version__)
-  else:
-    return ('Google Cloud Dataflow SDK for Python', container_version)
+  return ('Google Cloud Dataflow SDK for Python', container_version)
 
 
 def get_sdk_package_name():
@@ -502,8 +501,7 @@ def get_sdk_package_name():
   container_version = get_required_container_version()
   if container_version == BEAM_CONTAINER_VERSION:
     return BEAM_PACKAGE_NAME
-  else:
-    return GOOGLE_PACKAGE_NAME
+  return GOOGLE_PACKAGE_NAME
 
 
 def _download_pypi_sdk_package(temp_dir):

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py
index 046313a..4cf4131 100644
--- a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py
+++ b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py
@@ -23,10 +23,6 @@ from apache_beam.runners.dataflow.dataflow_runner import DataflowRunner
 
 
 class TestDataflowRunner(DataflowRunner):
-
-  def __init__(self):
-    super(TestDataflowRunner, self).__init__()
-
   def run(self, pipeline):
     """Execute test pipeline and verify test matcher"""
     options = pipeline.options.view_as(TestOptions)
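
Here the fix deletes an __init__ that only forwards to super() with identical arguments; Python inherits the parent constructor automatically, so the override adds nothing (later pylint versions flag this as useless-super-delegation). In miniature, with illustrative class names:

    class BaseRunner(object):
        def __init__(self):
            self.jobs = []

    class TestRunner(BaseRunner):
        # No __init__ needed: a subclass whose __init__ only called
        # super(TestRunner, self).__init__() behaves identically without it.
        def run(self):
            return len(self.jobs)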

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/direct/bundle_factory.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/direct/bundle_factory.py b/sdks/python/apache_beam/runners/direct/bundle_factory.py
index 647b5f2..42c8095 100644
--- a/sdks/python/apache_beam/runners/direct/bundle_factory.py
+++ b/sdks/python/apache_beam/runners/direct/bundle_factory.py
@@ -127,8 +127,7 @@ class Bundle(object):
     if not self._stacked:
       if self._committed and not make_copy:
         return self._elements
-      else:
-        return list(self._elements)
+      return list(self._elements)
 
     def iterable_stacked_or_elements(elements):
       for e in elements:
@@ -140,9 +139,8 @@ class Bundle(object):
 
     if self._committed and not make_copy:
       return iterable_stacked_or_elements(self._elements)
-    else:
-      # returns a copy.
-      return [e for e in iterable_stacked_or_elements(self._elements)]
+    # returns a copy.
+    return [e for e in iterable_stacked_or_elements(self._elements)]
 
   def has_elements(self):
     return len(self._elements) > 0
@@ -171,9 +169,9 @@ class Bundle(object):
     if not self._stacked:
       self._elements.append(element)
       return
-    if (len(self._elements) > 0 and
-        (isinstance(self._elements[-1], WindowedValue) or
-         isinstance(self._elements[-1], Bundle.StackedWindowedValues)) and
+    if (self._elements and
+        (isinstance(self._elements[-1], (WindowedValue,
+                                         Bundle.StackedWindowedValues))) and
         self._elements[-1].timestamp == element.timestamp and
         self._elements[-1].windows == element.windows):
       if isinstance(self._elements[-1], WindowedValue):

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/direct/evaluation_context.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/direct/evaluation_context.py b/sdks/python/apache_beam/runners/direct/evaluation_context.py
index 8114104..2169c7c 100644
--- a/sdks/python/apache_beam/runners/direct/evaluation_context.py
+++ b/sdks/python/apache_beam/runners/direct/evaluation_context.py
@@ -281,11 +281,11 @@ class EvaluationContext(object):
     """
     if transform:
       return self._is_transform_done(transform)
-    else:
-      for applied_ptransform in self._step_names:
-        if not self._is_transform_done(applied_ptransform):
-          return False
-      return True
+
+    for applied_ptransform in self._step_names:
+      if not self._is_transform_done(applied_ptransform):
+        return False
+    return True
 
   def _is_transform_done(self, transform):
     tw = self._watermark_manager.get_watermarks(transform)

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/direct/executor.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/direct/executor.py b/sdks/python/apache_beam/runners/direct/executor.py
index ce6356c..f6a1d7f 100644
--- a/sdks/python/apache_beam/runners/direct/executor.py
+++ b/sdks/python/apache_beam/runners/direct/executor.py
@@ -240,13 +240,6 @@ class _CompletionCallback(object):
         _ExecutorServiceParallelExecutor.ExecutorUpdate(None, exception))
 
 
-class _TimerCompletionCallback(_CompletionCallback):
-
-  def __init__(self, evaluation_context, all_updates, timers):
-    super(_TimerCompletionCallback, self).__init__(
-        evaluation_context, all_updates, timers)
-
-
 class TransformExecutor(ExecutorService.CallableTask):
   """TransformExecutor will evaluate a bundle using an applied ptransform.
 
@@ -529,7 +522,7 @@ class _ExecutorServiceParallelExecutor(object):
         empty_bundle = (
             self._executor.evaluation_context.create_empty_committed_bundle(
                 applied_ptransform.inputs[0]))
-        timer_completion_callback = _TimerCompletionCallback(
+        timer_completion_callback = _CompletionCallback(
             self._executor.evaluation_context, self._executor.all_updates,
             applied_ptransform)
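
_TimerCompletionCallback added nothing to _CompletionCallback beyond a pass-through constructor, so the commit removes the subclass and constructs the base class at the call site. The same move in miniature (names are illustrative):

    class CompletionCallback(object):
        def __init__(self, context, all_updates, transform):
            self.context = context
            self.all_updates = all_updates
            self.transform = transform

    # Before: TimerCompletionCallback(ctx, updates, transform), where the
    # subclass only re-declared __init__.  After:
    callback = CompletionCallback('ctx', [], 'timer-transform')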
 

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/direct/transform_evaluator.py b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
index 662c61d..f34513c 100644
--- a/sdks/python/apache_beam/runners/direct/transform_evaluator.py
+++ b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
@@ -278,13 +278,6 @@ class _TaggedReceivers(dict):
 
 class _ParDoEvaluator(_TransformEvaluator):
   """TransformEvaluator for ParDo transform."""
-
-  def __init__(self, evaluation_context, applied_ptransform,
-               input_committed_bundle, side_inputs, scoped_metrics_container):
-    super(_ParDoEvaluator, self).__init__(
-        evaluation_context, applied_ptransform, input_committed_bundle,
-        side_inputs, scoped_metrics_container)
-
   def start_bundle(self):
     transform = self._applied_ptransform.transform
 

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/runners/runner.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/runners/runner.py b/sdks/python/apache_beam/runners/runner.py
index 7e7ec24..6c05951 100644
--- a/sdks/python/apache_beam/runners/runner.py
+++ b/sdks/python/apache_beam/runners/runner.py
@@ -111,8 +111,7 @@ def group_by_key_input_visitor():
       # pylint: disable=wrong-import-order, wrong-import-position
       from apache_beam import GroupByKey, GroupByKeyOnly
       from apache_beam import typehints
-      if (isinstance(transform_node.transform, GroupByKey) or
-          isinstance(transform_node.transform, GroupByKeyOnly)):
+      if isinstance(transform_node.transform, (GroupByKey, GroupByKeyOnly)):
         pcoll = transform_node.inputs[0]
         input_type = pcoll.element_type
         # If input_type is not specified, then treat it as `Any`.

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/tests/pipeline_verifiers.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/tests/pipeline_verifiers.py b/sdks/python/apache_beam/tests/pipeline_verifiers.py
index 3cac658..51302b0 100644
--- a/sdks/python/apache_beam/tests/pipeline_verifiers.py
+++ b/sdks/python/apache_beam/tests/pipeline_verifiers.py
@@ -66,11 +66,8 @@ class PipelineStateMatcher(BaseMatcher):
 
 def retry_on_io_error_and_server_error(exception):
   """Filter allowing retries on file I/O errors and service error."""
-  if isinstance(exception, IOError) or \
-          (HttpError is not None and isinstance(exception, HttpError)):
-    return True
-  else:
-    return False
+  return isinstance(exception, IOError) or \
+          (HttpError is not None and isinstance(exception, HttpError))
 
 
 class FileChecksumMatcher(BaseMatcher):
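
Same simplification as in bigquery_matcher.py, except that here HttpError may be None when the optional GCP dependency is not installed, hence the extra guard. A self-contained sketch of the rewritten filter:

    HttpError = None  # stands in for an import that may be unavailable

    def retry_on_io_error_and_server_error(exception):
        # The condition is returned directly instead of being wrapped in
        # `if ...: return True` / `else: return False`.
        return isinstance(exception, IOError) or (
            HttpError is not None and isinstance(exception, HttpError))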

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/transforms/combiners.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/combiners.py b/sdks/python/apache_beam/transforms/combiners.py
index f55d46a..a4cd462 100644
--- a/sdks/python/apache_beam/transforms/combiners.py
+++ b/sdks/python/apache_beam/transforms/combiners.py
@@ -95,8 +95,7 @@ class MeanCombineFn(core.CombineFn):
       return cy_combiners.MeanInt64Fn()
     elif input_type is float:
       return cy_combiners.MeanFloatFn()
-    else:
-      return self
+    return self
 
 
 class Count(object):
@@ -310,23 +309,19 @@ class TopCombineFn(core.CombineFn):
     if len(buffer) < self._n:
       if not buffer:
         return element_key, [element]
-      else:
-        buffer.append(element)
-        if lt(element_key, threshold):  # element_key < threshold
-          return element_key, buffer
-        else:
-          return accumulator  # with mutated buffer
+      buffer.append(element)
+      if lt(element_key, threshold):  # element_key < threshold
+        return element_key, buffer
+      return accumulator  # with mutated buffer
     elif lt(threshold, element_key):  # threshold < element_key
       buffer.append(element)
       if len(buffer) < self._buffer_size:
         return accumulator
-      else:
-        self._sort_buffer(buffer, lt)
-        min_element = buffer[-self._n]
-        threshold = self._key_fn(min_element) if self._key_fn else min_element
-        return threshold, buffer[-self._n:]
-    else:
-      return accumulator
+      self._sort_buffer(buffer, lt)
+      min_element = buffer[-self._n]
+      threshold = self._key_fn(min_element) if self._key_fn else min_element
+      return threshold, buffer[-self._n:]
+    return accumulator
 
   def merge_accumulators(self, accumulators, *args, **kwargs):
     accumulators = list(accumulators)
@@ -357,10 +352,6 @@ class TopCombineFn(core.CombineFn):
 
 
 class Largest(TopCombineFn):
-
-  def __init__(self, n):
-    super(Largest, self).__init__(n)
-
   def default_label(self):
     return 'Largest(%s)' % self._n
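
Two things happen in this file: Largest loses another pass-through __init__, and add_input is flattened by no-else-return. The add_input hunk is the densest of the commit: the accumulator is a (threshold, buffer) pair, elements are appended cheaply, and the buffer is only sorted and trimmed once it reaches a cap. A simplified, self-contained sketch of that bounded-buffer top-N idea (without Beam's key functions and comparators):

    def create_accumulator():
        return (float('inf'), [])  # (threshold, buffer)

    def add_input(n, cap, accumulator, element):
        threshold, buf = accumulator
        if len(buf) < n:
            buf.append(element)
            return min(threshold, element), buf
        if element > threshold:     # may belong in the top n
            buf.append(element)
            if len(buf) >= cap:
                buf.sort()
                buf = buf[-n:]      # keep only the top n
                threshold = buf[0]  # the new n-th largest
            return threshold, buf
        return accumulator          # too small: buffer unchanged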
 

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/transforms/combiners_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/combiners_test.py b/sdks/python/apache_beam/transforms/combiners_test.py
index 6c101fe..af76889 100644
--- a/sdks/python/apache_beam/transforms/combiners_test.py
+++ b/sdks/python/apache_beam/transforms/combiners_test.py
@@ -164,10 +164,10 @@ class CombineTest(unittest.TestCase):
             DisplayDataItemMatcher('fn', sampleFn.fn.__name__),
             DisplayDataItemMatcher('combine_fn',
                                    transform.fn.__class__)]
-        if len(args) > 0:
+        if args:
           expected_items.append(
               DisplayDataItemMatcher('args', str(args)))
-        if len(kwargs) > 0:
+        if kwargs:
           expected_items.append(
               DisplayDataItemMatcher('kwargs', str(kwargs)))
         hc.assert_that(dd.items, hc.contains_inanyorder(*expected_items))

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/typehints/decorators.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/typehints/decorators.py b/sdks/python/apache_beam/typehints/decorators.py
index d8f0b1b..af6c499 100644
--- a/sdks/python/apache_beam/typehints/decorators.py
+++ b/sdks/python/apache_beam/typehints/decorators.py
@@ -237,8 +237,7 @@ def _unpack_positional_arg_hints(arg, hint):
     if isinstance(hint, typehints.TupleConstraint):
       return tuple(_unpack_positional_arg_hints(a, t)
                    for a, t in zip(arg, hint.tuple_types))
-    else:
-      return (typehints.Any,) * len(arg)
+    return (typehints.Any,) * len(arg)
   return hint
 
 

http://git-wip-us.apache.org/repos/asf/beam/blob/bf474a0b/sdks/python/apache_beam/typehints/typehints.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/typehints/typehints.py b/sdks/python/apache_beam/typehints/typehints.py
index 1557d85..9b41adb 100644
--- a/sdks/python/apache_beam/typehints/typehints.py
+++ b/sdks/python/apache_beam/typehints/typehints.py
@@ -1039,8 +1039,7 @@ def is_consistent_with(sub, base):
   if isinstance(base, TypeConstraint):
     if isinstance(sub, UnionConstraint):
       return all(is_consistent_with(c, base) for c in sub.union_types)
-    else:
-      return base._consistent_with_check_(sub)
+    return base._consistent_with_check_(sub)
   elif isinstance(sub, TypeConstraint):
     # Nothing but object lives above any type constraints.
     return base == object


[13/50] [abbrv] beam git commit: Removes final minor usages of OldDoFn outside OldDoFn itself

Posted by ke...@apache.org.
Removes final minor usages of OldDoFn outside OldDoFn itself


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/a3b5f968
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/a3b5f968
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/a3b5f968

Branch: refs/heads/jstorm-runner
Commit: a3b5f968c1ae2e4f712bfcf200a03d8d193fd90c
Parents: 3e24388
Author: Eugene Kirpichov <ki...@google.com>
Authored: Tue Apr 11 15:06:45 2017 -0700
Committer: Eugene Kirpichov <ki...@google.com>
Committed: Fri Apr 14 23:34:49 2017 -0700

----------------------------------------------------------------------
 .../beam/runners/core/AssignWindowsDoFn.java    |  78 -----
 .../apache/beam/runners/core/DoFnAdapters.java  | 328 -------------------
 .../apache/beam/runners/core/DoFnRunners.java   |   2 +-
 .../GroupAlsoByWindowViaOutputBufferDoFn.java   |  17 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |   7 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |  11 +-
 .../core/GroupAlsoByWindowsAggregators.java     |  28 ++
 .../runners/core/GroupAlsoByWindowsDoFn.java    |  46 ---
 .../core/LateDataDroppingDoFnRunner.java        |   3 +-
 ...roupAlsoByWindowViaOutputBufferDoFnTest.java |   4 +-
 .../core/GroupAlsoByWindowsProperties.java      |  27 +-
 .../beam/runners/core/ReduceFnTester.java       |   3 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   6 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |   9 +-
 .../spark/translation/SparkAssignWindowFn.java  |   3 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   8 +-
 16 files changed, 85 insertions(+), 495 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/AssignWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/AssignWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/AssignWindowsDoFn.java
deleted file mode 100644
index bbf3574..0000000
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/AssignWindowsDoFn.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.core;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.google.common.collect.Iterables;
-import java.util.Collection;
-import org.apache.beam.runners.core.OldDoFn.RequiresWindowAccess;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.sdk.util.SystemDoFnInternal;
-import org.apache.beam.sdk.values.PCollection;
-import org.joda.time.Instant;
-
-/**
- * {@link OldDoFn} that tags elements of a {@link PCollection} with windows, according to the
- * provided {@link WindowFn}.
- *
- * @param <T> Type of elements being windowed
- * @param <W> Window type
- */
-@SystemDoFnInternal
-public class AssignWindowsDoFn<T, W extends BoundedWindow> extends OldDoFn<T, T>
-    implements RequiresWindowAccess {
-  private WindowFn<? super T, W> fn;
-
-  public AssignWindowsDoFn(WindowFn<? super T, W> fn) {
-    this.fn =
-        checkNotNull(
-            fn,
-            "%s provided to %s cannot be null",
-            WindowFn.class.getSimpleName(),
-            AssignWindowsDoFn.class.getSimpleName());
-  }
-
-  @Override
-  @SuppressWarnings("unchecked")
-  public void processElement(final ProcessContext c) throws Exception {
-    Collection<W> windows =
-        ((WindowFn<T, W>) fn).assignWindows(
-            ((WindowFn<T, W>) fn).new AssignContext() {
-                @Override
-                public T element() {
-                  return c.element();
-                }
-
-                @Override
-                public Instant timestamp() {
-                  return c.timestamp();
-                }
-
-                @Override
-                public BoundedWindow window() {
-                  return Iterables.getOnlyElement(c.windowingInternals().windows());
-                }
-              });
-
-    c.windowingInternals()
-        .outputWindowedValue(c.element(), c.timestamp(), windows, PaneInfo.NO_FIRING);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
deleted file mode 100644
index 66ad736..0000000
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.core;
-
-import java.io.IOException;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.AggregatorRetriever;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.DoFn.Context;
-import org.apache.beam.sdk.transforms.DoFn.OnTimerContext;
-import org.apache.beam.sdk.transforms.DoFn.ProcessContext;
-import org.apache.beam.sdk.transforms.display.DisplayData;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvoker;
-import org.apache.beam.sdk.transforms.reflect.DoFnInvokers;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
-import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
-import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.Timer;
-import org.apache.beam.sdk.util.state.State;
-import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.sdk.values.TupleTag;
-import org.joda.time.Duration;
-import org.joda.time.Instant;
-
-/**
- * Utility class containing adapters to/from {@link DoFn} and {@link OldDoFn}.
- *
- * @deprecated This class will go away when we start running {@link DoFn}'s directly (using {@link
- *     DoFnInvoker}) rather than via {@link OldDoFn}.
- */
-@Deprecated
-public class DoFnAdapters {
-  /** Should not be instantiated. */
-  private DoFnAdapters() {}
-
-  /** Creates an {@link OldDoFn} that delegates to the {@link DoFn}. */
-  @SuppressWarnings({"unchecked", "rawtypes"})
-  public static <InputT, OutputT> OldDoFn<InputT, OutputT> toOldDoFn(DoFn<InputT, OutputT> fn) {
-    DoFnSignature signature = DoFnSignatures.getSignature((Class) fn.getClass());
-    if (signature.processElement().observesWindow()) {
-      return new WindowDoFnAdapter<>(fn);
-    } else {
-      return new SimpleDoFnAdapter<>(fn);
-    }
-  }
-
-  /**
-   * Wraps a {@link DoFn} that doesn't require access to {@link BoundedWindow} as an {@link
-   * OldDoFn}.
-   */
-  private static class SimpleDoFnAdapter<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
-    private final DoFn<InputT, OutputT> fn;
-    private transient DoFnInvoker<InputT, OutputT> invoker;
-
-    SimpleDoFnAdapter(DoFn<InputT, OutputT> fn) {
-      super(AggregatorRetriever.getDelegatingAggregators(fn));
-      this.fn = fn;
-      this.invoker = DoFnInvokers.invokerFor(fn);
-    }
-
-    @Override
-    public void setup() throws Exception {
-      this.invoker.invokeSetup();
-    }
-
-    @Override
-    public void startBundle(Context c) throws Exception {
-      fn.prepareForProcessing();
-      invoker.invokeStartBundle(new ContextAdapter<>(fn, c));
-    }
-
-    @Override
-    public void finishBundle(Context c) throws Exception {
-      invoker.invokeFinishBundle(new ContextAdapter<>(fn, c));
-    }
-
-    @Override
-    public void teardown() throws Exception {
-      this.invoker.invokeTeardown();
-    }
-
-    @Override
-    public void processElement(ProcessContext c) throws Exception {
-      ProcessContextAdapter<InputT, OutputT> adapter = new ProcessContextAdapter<>(fn, c);
-      invoker.invokeProcessElement(adapter);
-    }
-
-    @Override
-    public Duration getAllowedTimestampSkew() {
-      return fn.getAllowedTimestampSkew();
-    }
-
-    @Override
-    public void populateDisplayData(DisplayData.Builder builder) {
-      builder.delegate(fn);
-    }
-
-    private void readObject(java.io.ObjectInputStream in)
-        throws IOException, ClassNotFoundException {
-      in.defaultReadObject();
-      this.invoker = DoFnInvokers.invokerFor(fn);
-    }
-  }
-
-  /** Wraps a {@link DoFn} that requires access to {@link BoundedWindow} as an {@link OldDoFn}. */
-  private static class WindowDoFnAdapter<InputT, OutputT> extends SimpleDoFnAdapter<InputT, OutputT>
-      implements OldDoFn.RequiresWindowAccess {
-
-    WindowDoFnAdapter(DoFn<InputT, OutputT> fn) {
-      super(fn);
-    }
-  }
-
-  /**
-   * Wraps an {@link OldDoFn.Context} as a {@link DoFnInvoker.ArgumentProvider} inside a {@link
-   * DoFn.StartBundle} or {@link DoFn.FinishBundle} method, which means the extra context is
-   * unavailable.
-   */
-  private static class ContextAdapter<InputT, OutputT> extends DoFn<InputT, OutputT>.Context
-      implements DoFnInvoker.ArgumentProvider<InputT, OutputT> {
-
-    private OldDoFn<InputT, OutputT>.Context context;
-
-    private ContextAdapter(DoFn<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.Context context) {
-      fn.super();
-      this.context = context;
-      super.setupDelegateAggregators();
-    }
-
-    @Override
-    public PipelineOptions getPipelineOptions() {
-      return context.getPipelineOptions();
-    }
-
-    @Override
-    public void output(OutputT output) {
-      context.output(output);
-    }
-
-    @Override
-    public void outputWithTimestamp(OutputT output, Instant timestamp) {
-      context.outputWithTimestamp(output, timestamp);
-    }
-
-    @Override
-    public <T> void output(TupleTag<T> tag, T output) {
-      context.output(tag, output);
-    }
-
-    @Override
-    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      context.outputWithTimestamp(tag, output, timestamp);
-    }
-
-    @Override
-    protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT> createAggregator(
-        String name,
-        CombineFn<AggInputT, ?, AggOutputT> combiner) {
-      return context.createAggregatorInternal(name, combiner);
-    }
-
-    @Override
-    public BoundedWindow window() {
-      // The OldDoFn doesn't allow us to ask for these outside processElement, so this
-      // should be unreachable.
-      throw new UnsupportedOperationException(
-          "Can only get the window in processElement; elsewhere there is no defined window.");
-    }
-
-    @Override
-    public Context context(DoFn<InputT, OutputT> doFn) {
-      return this;
-    }
-
-    @Override
-    public ProcessContext processContext(DoFn<InputT, OutputT> doFn) {
-      throw new UnsupportedOperationException(
-          "Can only get a ProcessContext in processElement");
-    }
-
-    @Override
-    public OnTimerContext onTimerContext(DoFn<InputT, OutputT> doFn) {
-      throw new UnsupportedOperationException(
-          "Timers are not supported for OldDoFn");
-    }
-
-    @Override
-    public RestrictionTracker<?> restrictionTracker() {
-      throw new UnsupportedOperationException("This is a non-splittable DoFn");
-    }
-
-    @Override
-    public State state(String stateId) {
-      throw new UnsupportedOperationException("State is not supported by this runner");
-    }
-
-    @Override
-    public Timer timer(String timerId) {
-      throw new UnsupportedOperationException("Timers are not supported by this runner");
-    }
-  }
-
-  /**
-   * Wraps an {@link OldDoFn.ProcessContext} as a {@link DoFnInvoker.ArgumentProvider} method.
-   */
-  private static class ProcessContextAdapter<InputT, OutputT>
-      extends DoFn<InputT, OutputT>.ProcessContext
-      implements DoFnInvoker.ArgumentProvider<InputT, OutputT> {
-
-    private OldDoFn<InputT, OutputT>.ProcessContext context;
-
-    private ProcessContextAdapter(
-        DoFn<InputT, OutputT> fn, OldDoFn<InputT, OutputT>.ProcessContext context) {
-      fn.super();
-      this.context = context;
-    }
-
-    @Override
-    public PipelineOptions getPipelineOptions() {
-      return context.getPipelineOptions();
-    }
-
-    @Override
-    public <T> T sideInput(PCollectionView<T> view) {
-      return context.sideInput(view);
-    }
-
-    @Override
-    public void output(OutputT output) {
-      context.output(output);
-    }
-
-    @Override
-    public void outputWithTimestamp(OutputT output, Instant timestamp) {
-      context.outputWithTimestamp(output, timestamp);
-    }
-
-    @Override
-    public <T> void output(TupleTag<T> tag, T output) {
-      context.output(tag, output);
-    }
-
-    @Override
-    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      context.outputWithTimestamp(tag, output, timestamp);
-    }
-
-    @Override
-    protected <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT> createAggregator(
-        String name, CombineFn<AggInputT, ?, AggOutputT> combiner) {
-      return context.createAggregatorInternal(name, combiner);
-    }
-
-    @Override
-    public InputT element() {
-      return context.element();
-    }
-
-    @Override
-    public Instant timestamp() {
-      return context.timestamp();
-    }
-
-    @Override
-    public PaneInfo pane() {
-      return context.pane();
-    }
-
-    @Override
-    public void updateWatermark(Instant watermark) {
-      throw new UnsupportedOperationException("Only splittable DoFn's can use updateWatermark()");
-    }
-
-    @Override
-    public BoundedWindow window() {
-      return context.window();
-    }
-
-    @Override
-    public Context context(DoFn<InputT, OutputT> doFn) {
-      return this;
-    }
-
-    @Override
-    public ProcessContext processContext(DoFn<InputT, OutputT> doFn) {
-      return this;
-    }
-
-    @Override
-    public OnTimerContext onTimerContext(DoFn<InputT, OutputT> doFn) {
-      throw new UnsupportedOperationException("Timers are not supported for OldDoFn");
-    }
-
-    @Override
-    public RestrictionTracker<?> restrictionTracker() {
-      throw new UnsupportedOperationException("This is a non-splittable DoFn");
-    }
-
-    @Override
-    public State state(String stateId) {
-      throw new UnsupportedOperationException("State is not supported by this runner");
-    }
-
-    @Override
-    public Timer timer(String timerId) {
-      throw new UnsupportedOperationException("Timers are not supported by this runner");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
index b09ee08..06db6e1 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
@@ -105,7 +105,7 @@ public class DoFnRunners {
   /**
    * Returns an implementation of {@link DoFnRunner} that handles late data dropping.
    *
-   * <p>It drops elements from expired windows before they reach the underlying {@link OldDoFn}.
+   * <p>It drops elements from expired windows before they reach the underlying {@link DoFn}.
    */
   public static <K, InputT, OutputT, W extends BoundedWindow>
       DoFnRunner<KeyedWorkItem<K, InputT>, KV<K, OutputT>> lateDataDroppingRunner(

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFn.java
index 5508b2e..5bd7e2d 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFn.java
@@ -17,23 +17,34 @@
  */
 package org.apache.beam.runners.core;
 
+import static org.apache.beam.runners.core.GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER;
+import static org.apache.beam.runners.core.GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER;
+
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.beam.runners.core.construction.Triggers;
 import org.apache.beam.runners.core.triggers.ExecutableTriggerStateMachine;
 import org.apache.beam.runners.core.triggers.TriggerStateMachines;
+import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.SystemDoFnInternal;
+import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingStrategy;
+import org.apache.beam.sdk.values.KV;
 import org.joda.time.Instant;
 
 /**
- * The default batch {@link GroupAlsoByWindowsDoFn} implementation, if no specialized "fast path"
- * implementation is applicable.
+ * The default batch {@link GroupAlsoByWindowsAggregators} implementation, if no specialized "fast
+ * path" implementation is applicable.
  */
 @SystemDoFnInternal
 public class GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, OutputT, W extends BoundedWindow>
-    extends GroupAlsoByWindowsDoFn<K, InputT, OutputT, W> {
+    extends OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
+  protected final Aggregator<Long, Long> droppedDueToClosedWindow =
+      createAggregator(DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
+  protected final Aggregator<Long, Long> droppedDueToLateness =
+      createAggregator(DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
 
   private final WindowingStrategy<?, W> strategy;
   private final StateInternalsFactory<K> stateInternalsFactory;

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
index bf48df1..e6be93a 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetDoFn.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.KV;
 
 /**
- * A general {@link GroupAlsoByWindowsDoFn}. This delegates all of the logic to the
+ * A general {@link GroupAlsoByWindowsAggregators}. This delegates all of the logic to the
  * {@link ReduceFnRunner}.
  */
 @SystemDoFnInternal
@@ -46,9 +46,10 @@ public class GroupAlsoByWindowViaWindowSetDoFn<
 
   protected final Aggregator<Long, Long> droppedDueToClosedWindow =
       createAggregator(
-          GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
   protected final Aggregator<Long, Long> droppedDueToLateness =
-      createAggregator(GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
+      createAggregator(
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
 
   private final WindowingStrategy<Object, W> windowingStrategy;
   private final StateInternalsFactory<K> stateInternalsFactory;

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
index 0cf6e2d..e146bfc 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
@@ -35,7 +35,7 @@ import org.apache.beam.sdk.values.TupleTag;
 import org.joda.time.Instant;
 
 /**
- * A general {@link GroupAlsoByWindowsDoFn}. This delegates all of the logic to the
+ * A general {@link GroupAlsoByWindowsAggregators}. This delegates all of the logic to the
  * {@link ReduceFnRunner}.
  */
 @SystemDoFnInternal
@@ -61,9 +61,10 @@ public class GroupAlsoByWindowViaWindowSetNewDoFn<
 
   protected final Aggregator<Long, Long> droppedDueToClosedWindow =
       createAggregator(
-          GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
   protected final Aggregator<Long, Long> droppedDueToLateness =
-      createAggregator(GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
+      createAggregator(
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
   private final WindowingStrategy<Object, W> windowingStrategy;
   private SystemReduceFn<K, InputT, ?, OutputT, W> reduceFn;
   private transient StateInternalsFactory<K> stateInternalsFactory;
@@ -144,10 +145,6 @@ public class GroupAlsoByWindowViaWindowSetNewDoFn<
     reduceFnRunner.persist();
   }
 
-  public OldDoFn<KeyedWorkItem<K, InputT>, KV<K, OutputT>> asDoFn() {
-    throw new RuntimeException("Not implement!");
-  }
-
   public Aggregator<Long, Long> getDroppedDueToLatenessAggregator() {
     return droppedDueToLateness;
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsAggregators.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsAggregators.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsAggregators.java
new file mode 100644
index 0000000..7c4f252
--- /dev/null
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsAggregators.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.runners.core;
+
+import static org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupAlsoByWindow;
+
+/**
+ * Standard aggregator names related to {@link GroupAlsoByWindow}.
+ */
+public abstract class GroupAlsoByWindowsAggregators {
+  public static final String DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER = "DroppedDueToClosedWindow";
+  public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "DroppedDueToLateness";
+}
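
The new class exists solely to hold the two counter-name constants that previously lived on the now-deleted abstract GroupAlsoByWindowsDoFn; callers qualify the names instead of inheriting them. For consistency with the Python examples above, the same move sketched in Python:

    class GroupAlsoByWindowsAggregators(object):
        """Constants-only holder; nothing needs to inherit from it."""
        DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER = 'DroppedDueToClosedWindow'
        DROPPED_DUE_TO_LATENESS_COUNTER = 'DroppedDueToLateness'

    counter = GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER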

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsDoFn.java
deleted file mode 100644
index 7e96136..0000000
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowsDoFn.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.core;
-
-import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.Sum;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.util.SystemDoFnInternal;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.KV;
-
-/**
- * {@link OldDoFn} that merges windows and groups elements in those windows, optionally
- * combining values.
- *
- * @param <K> key type
- * @param <InputT> input value element type
- * @param <OutputT> output value element type
- * @param <W> window type
- */
-@SystemDoFnInternal
-public abstract class GroupAlsoByWindowsDoFn<K, InputT, OutputT, W extends BoundedWindow>
-    extends OldDoFn<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> {
-  public static final String DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER = "DroppedDueToClosedWindow";
-  public static final String DROPPED_DUE_TO_LATENESS_COUNTER = "DroppedDueToLateness";
-
-  protected final Aggregator<Long, Long> droppedDueToClosedWindow =
-      createAggregator(DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER, Sum.ofLongs());
-  protected final Aggregator<Long, Long> droppedDueToLateness =
-      createAggregator(DROPPED_DUE_TO_LATENESS_COUNTER, Sum.ofLongs());
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/main/java/org/apache/beam/runners/core/LateDataDroppingDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/LateDataDroppingDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/LateDataDroppingDoFnRunner.java
index 4d41527..cdc7ce7 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/LateDataDroppingDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/LateDataDroppingDoFnRunner.java
@@ -22,6 +22,7 @@ import com.google.common.base.Function;
 import com.google.common.base.Predicate;
 import com.google.common.collect.Iterables;
 import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.TimeDomain;
 import org.apache.beam.sdk.util.WindowTracing;
@@ -32,7 +33,7 @@ import org.joda.time.Instant;
 
 /**
  * A customized {@link DoFnRunner} that handles late data dropping for
- * a {@link KeyedWorkItem} input {@link OldDoFn}.
+ * a {@link KeyedWorkItem} input {@link DoFn}.
  *
  * <p>It expands windows before checking data lateness.
  *

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFnTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFnTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFnTest.java
index cb8d494..e725cd2 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFnTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowViaOutputBufferDoFnTest.java
@@ -43,10 +43,10 @@ public class GroupAlsoByWindowViaOutputBufferDoFnTest {
 
     @Override
     public <W extends BoundedWindow>
-    GroupAlsoByWindowsDoFn<K, InputT, Iterable<InputT>, W> forStrategy(
+        GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, Iterable<InputT>, W> forStrategy(
             WindowingStrategy<?, W> windowingStrategy,
             StateInternalsFactory<K> stateInternalsFactory) {
-      return new GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, Iterable<InputT>, W>(
+      return new GroupAlsoByWindowViaOutputBufferDoFn<>(
           windowingStrategy,
           stateInternalsFactory,
           SystemReduceFn.<K, InputT, W>buffering(inputCoder));

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
index d0a8923..a5031b8 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
@@ -57,7 +57,7 @@ import org.joda.time.Duration;
 import org.joda.time.Instant;
 
 /**
- * Properties of {@link GroupAlsoByWindowsDoFn}.
+ * Properties of {@link GroupAlsoByWindowsAggregators}.
  *
  * <p>Some properties may not hold of some implementations, due to restrictions on the context in
  * which the implementation is applicable. For example, some {@code GroupAlsoByWindows} may not
@@ -66,12 +66,13 @@ import org.joda.time.Instant;
 public class GroupAlsoByWindowsProperties {
 
   /**
-   * A factory of {@link GroupAlsoByWindowsDoFn} so that the various properties can provide the
-   * appropriate windowing strategy under test.
+   * A factory of {@link GroupAlsoByWindowsAggregators} so that the various properties can provide
+   * the appropriate windowing strategy under test.
    */
   public interface GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> {
-    <W extends BoundedWindow> GroupAlsoByWindowsDoFn<K, InputT, OutputT, W> forStrategy(
-        WindowingStrategy<?, W> strategy, StateInternalsFactory<K> stateInternalsFactory);
+    <W extends BoundedWindow>
+        GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, OutputT, W> forStrategy(
+            WindowingStrategy<?, W> strategy, StateInternalsFactory<K> stateInternalsFactory);
   }
 
   /**
@@ -311,7 +312,7 @@ public class GroupAlsoByWindowsProperties {
   }
 
   /**
-   * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
+   * Tests that the given {@link GroupAlsoByWindowsAggregators} implementation combines elements per
    * session window correctly according to the provided {@link CombineFn}.
    */
   public static void combinesElementsPerSession(
@@ -498,7 +499,7 @@ public class GroupAlsoByWindowsProperties {
   }
 
   /**
-   * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
+   * Tests that the given {@link GroupAlsoByWindowsAggregators} implementation combines elements per
    * session window correctly according to the provided {@link CombineFn}.
    */
   public static void combinesElementsPerSessionWithEndOfWindowTimestamp(
@@ -597,7 +598,7 @@ public class GroupAlsoByWindowsProperties {
 
   private static <K, InputT, OutputT, W extends BoundedWindow>
       List<WindowedValue<KV<K, OutputT>>> processElement(
-          GroupAlsoByWindowsDoFn<K, InputT, OutputT, W> fn,
+          GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, OutputT, W> fn,
           KV<K, Iterable<WindowedValue<InputT>>> element)
           throws Exception {
     TestProcessContext<K, InputT, OutputT, W> c = new TestProcessContext<>(fn, element);
@@ -621,18 +622,18 @@ public class GroupAlsoByWindowsProperties {
   }
 
   /**
-   * A {@link GroupAlsoByWindowsDoFn.ProcessContext} providing just enough context for a {@link
-   * GroupAlsoByWindowsDoFn} - namely, information about the element and output via {@link
-   * WindowingInternals}, but no side inputs/outputs and no normal output.
+   * A {@link GroupAlsoByWindowViaOutputBufferDoFn.ProcessContext} providing just enough context for
+   * a {@link GroupAlsoByWindowsAggregators} - namely, information about the element and output via
+   * {@link WindowingInternals}, but no side inputs/outputs and no normal output.
    */
   private static class TestProcessContext<K, InputT, OutputT, W extends BoundedWindow>
-      extends GroupAlsoByWindowsDoFn<K, InputT, OutputT, W>.ProcessContext {
+      extends GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, OutputT, W>.ProcessContext {
     private final PipelineOptions options = PipelineOptionsFactory.create();
     private final KV<K, Iterable<WindowedValue<InputT>>> element;
     private final List<WindowedValue<KV<K, OutputT>>> output = new ArrayList<>();
 
     private TestProcessContext(
-        GroupAlsoByWindowsDoFn<K, InputT, OutputT, W> fn,
+        GroupAlsoByWindowViaOutputBufferDoFn<K, InputT, OutputT, W> fn,
         KV<K, Iterable<WindowedValue<InputT>>> element) {
       fn.super();
       this.element = element;

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
index 914550e..923b2c3 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
@@ -113,7 +113,8 @@ public class ReduceFnTester<InputT, OutputT, W extends BoundedWindow> {
   private boolean autoAdvanceOutputWatermark = true;
 
   private final InMemoryLongSumAggregator droppedDueToClosedWindow =
-      new InMemoryLongSumAggregator(GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER);
+      new InMemoryLongSumAggregator(
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER);
 
   /**
    * Creates a {@link ReduceFnTester} for the given {@link WindowingStrategy}, creating

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
index ce7b12a..ce29709 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
@@ -24,7 +24,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import java.util.ArrayList;
 import java.util.Collection;
-import org.apache.beam.runners.core.GroupAlsoByWindowsDoFn;
+import org.apache.beam.runners.core.GroupAlsoByWindowsAggregators;
 import org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly;
 import org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupAlsoByWindow;
 import org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupByKeyOnly;
@@ -146,10 +146,10 @@ class GroupAlsoByWindowEvaluatorFactory implements TransformEvaluatorFactory {
           application.getTransform().getValueCoder(inputBundle.getPCollection().getCoder());
       reduceFn = SystemReduceFn.buffering(valueCoder);
       droppedDueToClosedWindow = aggregatorChanges.createSystemAggregator(stepContext,
-          GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER,
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER,
           Sum.ofLongs());
       droppedDueToLateness = aggregatorChanges.createSystemAggregator(stepContext,
-          GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_LATENESS_COUNTER,
+          GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER,
           Sum.ofLongs());
     }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
index 029c28a..1b40613 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
@@ -23,7 +23,8 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import org.apache.beam.runners.core.GroupAlsoByWindowsDoFn;
+import org.apache.beam.runners.core.GroupAlsoByWindowsAggregators;
+import org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupAlsoByWindow;
 import org.apache.beam.runners.core.LateDataUtils;
 import org.apache.beam.runners.core.OutputWindowedValue;
 import org.apache.beam.runners.core.ReduceFnRunner;
@@ -76,7 +77,7 @@ import scala.reflect.ClassTag;
 import scala.runtime.AbstractFunction1;
 
 /**
- * An implementation of {@link org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetDoFn}
+ * An implementation of {@link GroupAlsoByWindow}
  * logic for grouping by windows and controlling trigger firings and pane accumulation.
  *
  * <p>This implementation is a composite of Spark transformations revolving around state management
@@ -208,9 +209,9 @@ public class SparkGroupAlsoByWindowViaWindowSet {
         // use in memory Aggregators since Spark Accumulators are not resilient
         // in stateful operators, once done with this partition.
         final InMemoryLongSumAggregator droppedDueToClosedWindow = new InMemoryLongSumAggregator(
-            GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER);
+            GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER);
         final InMemoryLongSumAggregator droppedDueToLateness = new InMemoryLongSumAggregator(
-            GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_LATENESS_COUNTER);
+            GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_LATENESS_COUNTER);
 
         AbstractIterator<
             Tuple2</*K*/ ByteArray, Tuple2<StateAndTimers, /*WV<KV<K, Itr<I>>>*/ List<byte[]>>>>

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkAssignWindowFn.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkAssignWindowFn.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkAssignWindowFn.java
index 18a3dd8..088b981 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkAssignWindowFn.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkAssignWindowFn.java
@@ -29,7 +29,8 @@ import org.joda.time.Instant;
 
 
 /**
- * An implementation of {@link org.apache.beam.runners.core.AssignWindowsDoFn} for the Spark runner.
+ * An implementation of {@link org.apache.beam.sdk.transforms.windowing.Window.Assign} for the Spark
+ * runner.
  */
 public class SparkAssignWindowFn<T, W extends BoundedWindow>
     implements Function<WindowedValue<T>, WindowedValue<T>> {

http://git-wip-us.apache.org/repos/asf/beam/blob/a3b5f968/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
index ccc0fa3..85adca9 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
@@ -21,8 +21,8 @@ package org.apache.beam.runners.spark.translation;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import org.apache.beam.runners.core.GroupAlsoByWindowViaOutputBufferDoFn;
-import org.apache.beam.runners.core.GroupAlsoByWindowsDoFn;
+import org.apache.beam.runners.core.GroupAlsoByWindowsAggregators;
+import org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupAlsoByWindow;
 import org.apache.beam.runners.core.InMemoryTimerInternals;
 import org.apache.beam.runners.core.OutputWindowedValue;
 import org.apache.beam.runners.core.ReduceFnRunner;
@@ -48,7 +48,7 @@ import org.apache.spark.api.java.function.FlatMapFunction;
 import org.joda.time.Instant;
 
 /**
- * An implementation of {@link GroupAlsoByWindowViaOutputBufferDoFn}
+ * An implementation of {@link GroupAlsoByWindow}
  * for the Spark runner.
  */
 public class SparkGroupAlsoByWindowViaOutputBufferFn<K, InputT, W extends BoundedWindow>
@@ -75,7 +75,7 @@ public class SparkGroupAlsoByWindowViaOutputBufferFn<K, InputT, W extends Bounde
 
     droppedDueToClosedWindow = runtimeContext.createAggregator(
         accumulator,
-        GroupAlsoByWindowsDoFn.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER,
+        GroupAlsoByWindowsAggregators.DROPPED_DUE_TO_CLOSED_WINDOW_COUNTER,
         Sum.ofLongs());
   }
 


[28/50] [abbrv] beam git commit: Remove getSideInputWindow

Posted by ke...@apache.org.
Remove getSideInputWindow

Callers should instead obtain the default WindowMappingFn when no explicit
WindowMappingFn is available.

Migrate all existing callers within the SDK and runners.
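
A minimal sketch of the migration, assuming a WindowFn fn, a PCollectionView view, and a main-input window mainWindow (hypothetical names; the real call sites are in the diffs below):

    // Before (removed in this commit):
    BoundedWindow sideInputWindow = fn.getSideInputWindow(mainWindow);

    // After: map through the default WindowMappingFn instead.
    BoundedWindow mappedWindow =
        fn.getDefaultWindowMappingFn().getSideInputWindow(mainWindow);

    // Side-input readers can also take the mapping from the view itself:
    BoundedWindow viewWindow =
        view.getWindowMappingFn().getSideInputWindow(mainWindow);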


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/79b066da
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/79b066da
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/79b066da

Branch: refs/heads/jstorm-runner
Commit: 79b066da4ed26fae63035fb16c03508ea77bf6db
Parents: 075b621
Author: Thomas Groh <tg...@google.com>
Authored: Tue Apr 4 10:38:36 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 13:09:39 2017 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/runners/core/OldDoFn.java     |  3 ++-
 .../beam/runners/spark/util/SparkSideInputReader.java  |  3 +--
 .../apache/beam/sdk/transforms/windowing/WindowFn.java | 13 -------------
 .../org/apache/beam/sdk/testing/StaticWindowsTest.java | 10 +++++++---
 4 files changed, 10 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/79b066da/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
index 507ee50..323edf9 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
@@ -42,6 +42,7 @@ import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.transforms.windowing.WindowMappingFn;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.joda.time.Duration;
@@ -241,7 +242,7 @@ public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDispl
      * window of the main input element.
      *
      * <p>See
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn#getSideInputWindow}
+     * {@link WindowMappingFn#getSideInputWindow}
      * for how this corresponding window is determined.
      *
      * @throws IllegalArgumentException if this is not a side input

http://git-wip-us.apache.org/repos/asf/beam/blob/79b066da/runners/spark/src/main/java/org/apache/beam/runners/spark/util/SparkSideInputReader.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/util/SparkSideInputReader.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/util/SparkSideInputReader.java
index c8e9850..d6e1a94 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/util/SparkSideInputReader.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/util/SparkSideInputReader.java
@@ -54,9 +54,8 @@ public class SparkSideInputReader implements SideInputReader {
     checkNotNull(windowedBroadcastHelper, "SideInput for view " + view + " is not available.");
 
     //--- sideInput window
-    WindowingStrategy<?, ?> sideInputWindowStrategy = windowedBroadcastHelper.getKey();
     final BoundedWindow sideInputWindow =
-        sideInputWindowStrategy.getWindowFn().getSideInputWindow(window);
+        view.getWindowMappingFn().getSideInputWindow(window);
 
     //--- match the appropriate sideInput window.
     // a tag will point to all matching sideInputs, that is all windows.

http://git-wip-us.apache.org/repos/asf/beam/blob/79b066da/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java
index 2f9e6c1..5ebbb41 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java
@@ -124,19 +124,6 @@ public abstract class WindowFn<T, W extends BoundedWindow>
   public abstract Coder<W> windowCoder();
 
   /**
-   * Returns the window of the side input corresponding to the given window of
-   * the main input. If not overridden, will use the window returned by calling
-   * {@link WindowMappingFn#getSideInputWindow(BoundedWindow)} on the result of
-   * {@link #getDefaultWindowMappingFn()}.
-   *
-   * @deprecated see {@link #getDefaultWindowMappingFn()}
-   */
-  @Deprecated
-  public W getSideInputWindow(BoundedWindow window) {
-    return getDefaultWindowMappingFn().getSideInputWindow(window);
-  }
-
-  /**
    * Returns the default {@link WindowMappingFn} to use to map main input windows to side input
    * windows. This should accept arbitrary main input windows, and produce a {@link BoundedWindow}
    * that can be produced by this {@link WindowFn}.

http://git-wip-us.apache.org/repos/asf/beam/blob/79b066da/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java
index e662619..7ee48c8 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java
@@ -70,8 +70,12 @@ public class StaticWindowsTest {
     WindowFn<Object, BoundedWindow> fn =
         StaticWindows.of(IntervalWindow.getCoder(), ImmutableList.of(first, second));
 
-    assertThat(fn.getSideInputWindow(first), Matchers.<BoundedWindow>equalTo(first));
-    assertThat(fn.getSideInputWindow(second), Matchers.<BoundedWindow>equalTo(second));
+    assertThat(
+        fn.getDefaultWindowMappingFn().getSideInputWindow(first),
+        Matchers.<BoundedWindow>equalTo(first));
+    assertThat(
+        fn.getDefaultWindowMappingFn().getSideInputWindow(second),
+        Matchers.<BoundedWindow>equalTo(second));
   }
 
   @Test
@@ -80,7 +84,7 @@ public class StaticWindowsTest {
         StaticWindows.of(IntervalWindow.getCoder(), ImmutableList.of(second));
     thrown.expect(IllegalArgumentException.class);
     thrown.expectMessage("contains");
-    fn.getSideInputWindow(first);
+    fn.getDefaultWindowMappingFn().getSideInputWindow(first);
   }
 
   @Test


[37/50] [abbrv] beam git commit: clean up description for sdk_location

Posted by ke...@apache.org.
clean up description for sdk_location


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/3bbdbce6
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/3bbdbce6
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/3bbdbce6

Branch: refs/heads/jstorm-runner
Commit: 3bbdbce67b60736b995aaea4e7a8f2b80e2866ee
Parents: 9b0cc98
Author: Ahmet Altay <al...@google.com>
Authored: Mon Apr 17 17:52:08 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Mon Apr 17 18:41:24 2017 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/utils/pipeline_options.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/3bbdbce6/sdks/python/apache_beam/utils/pipeline_options.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/utils/pipeline_options.py b/sdks/python/apache_beam/utils/pipeline_options.py
index ecc2e32..e622580 100644
--- a/sdks/python/apache_beam/utils/pipeline_options.py
+++ b/sdks/python/apache_beam/utils/pipeline_options.py
@@ -475,11 +475,11 @@ class SetupOptions(PipelineOptions):
         '--sdk_location',
         default='default',
         help=
-        ('Override the default GitHub location from where Dataflow SDK is '
-         'downloaded. It can be an URL, a GCS path, or a local path to an '
-         'SDK tarball. Workflow submissions will download or copy an SDK '
-         'tarball from here. If the string "default", '
-         'a standard SDK location is used. If empty, no SDK is copied.'))
+        ('Override the default location from where the Beam SDK is downloaded. '
+         'It can be a URL, a GCS path, or a local path to an SDK tarball. '
+         'Workflow submissions will download or copy an SDK tarball from here. '
+         'If set to the string "default", a standard SDK location is used. If '
+         'empty, no SDK is copied.'))
     parser.add_argument(
         '--extra_package', '--extra_packages',
         dest='extra_packages',


[14/50] [abbrv] beam git commit: This closes #2500

Posted by ke...@apache.org.
This closes #2500


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f7d727c0
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f7d727c0
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f7d727c0

Branch: refs/heads/jstorm-runner
Commit: f7d727c0f7ada7a162f5cb73f658ce52f094dd86
Parents: fdbadfc a3b5f96
Author: Eugene Kirpichov <ki...@google.com>
Authored: Fri Apr 14 23:54:59 2017 -0700
Committer: Eugene Kirpichov <ki...@google.com>
Committed: Fri Apr 14 23:54:59 2017 -0700

----------------------------------------------------------------------
 .../operators/ApexGroupByKeyOperator.java       | 225 ++++---------
 .../beam/runners/core/AssignWindowsDoFn.java    |  78 -----
 .../apache/beam/runners/core/DoFnAdapters.java  | 328 -------------------
 .../apache/beam/runners/core/DoFnRunners.java   |   2 +-
 .../GroupAlsoByWindowViaOutputBufferDoFn.java   |  17 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |   7 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |  11 +-
 .../core/GroupAlsoByWindowsAggregators.java     |  28 ++
 .../runners/core/GroupAlsoByWindowsDoFn.java    |  46 ---
 .../core/LateDataDroppingDoFnRunner.java        |   3 +-
 ...roupAlsoByWindowViaOutputBufferDoFnTest.java |   4 +-
 .../core/GroupAlsoByWindowsProperties.java      |  27 +-
 .../beam/runners/core/ReduceFnTester.java       |   3 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   6 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |   9 +-
 .../spark/translation/SparkAssignWindowFn.java  |   3 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   8 +-
 17 files changed, 148 insertions(+), 657 deletions(-)
----------------------------------------------------------------------



[46/50] [abbrv] beam git commit: This closes #2549

Posted by ke...@apache.org.
This closes #2549


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/fac4f3e3
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/fac4f3e3
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/fac4f3e3

Branch: refs/heads/jstorm-runner
Commit: fac4f3e3c937a76d6ca92ca6f73f8df9928ae94c
Parents: e556858 88513db
Author: Ismaël Mejía <ie...@apache.org>
Authored: Tue Apr 18 16:12:58 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 16:12:58 2017 +0200

----------------------------------------------------------------------
 runners/flink/pom.xml                           |  45 +++++---
 runners/flink/runner/pom.xml                    | 109 +++++++++++--------
 .../flink/FlinkDetachedRunnerResult.java        |   3 +-
 .../types/EncodedValueTypeInformation.java      |   9 --
 .../streaming/SingletonKeyedWorkItem.java       |   2 -
 .../streaming/SingletonKeyedWorkItemCoder.java  |   2 -
 .../wrappers/streaming/WindowDoFnOperator.java  |   3 -
 .../beam/runners/flink/PipelineOptionsTest.java |   2 +-
 8 files changed, 95 insertions(+), 80 deletions(-)
----------------------------------------------------------------------



[44/50] [abbrv] beam git commit: Remove flink-annotations dependency

Posted by ke...@apache.org.
Remove flink-annotations dependency


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/88513db3
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/88513db3
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/88513db3

Branch: refs/heads/jstorm-runner
Commit: 88513db31ee5301d96550b82b5f428b35966f2a7
Parents: 5fce8d2
Author: Ismaël Mejía <ie...@apache.org>
Authored: Tue Apr 18 14:42:37 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 16:12:47 2017 +0200

----------------------------------------------------------------------
 runners/flink/runner/pom.xml                                | 6 ------
 .../translation/types/EncodedValueTypeInformation.java      | 9 ---------
 2 files changed, 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/88513db3/runners/flink/runner/pom.xml
----------------------------------------------------------------------
diff --git a/runners/flink/runner/pom.xml b/runners/flink/runner/pom.xml
index 30f376c..95880f4 100644
--- a/runners/flink/runner/pom.xml
+++ b/runners/flink/runner/pom.xml
@@ -160,12 +160,6 @@
       <version>${flink.version}</version>
     </dependency>
 
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-annotations</artifactId>
-      <version>${flink.version}</version>
-    </dependency>
-
     <!-- For testing -->
     <dependency>
       <groupId>org.apache.flink</groupId>

http://git-wip-us.apache.org/repos/asf/beam/blob/88513db3/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java
index 0315ae3..e24bf31 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java
@@ -18,7 +18,6 @@
 package org.apache.beam.runners.flink.translation.types;
 
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.typeinfo.AtomicType;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -36,43 +35,36 @@ public class EncodedValueTypeInformation
   private static final long serialVersionUID = 1L;
 
   @Override
-  @PublicEvolving
   public boolean isBasicType() {
     return false;
   }
 
   @Override
-  @PublicEvolving
   public boolean isTupleType() {
     return false;
   }
 
   @Override
-  @PublicEvolving
   public int getArity() {
     return 0;
   }
 
   @Override
-  @PublicEvolving
   public int getTotalFields() {
     return 0;
   }
 
   @Override
-  @PublicEvolving
   public Class<byte[]> getTypeClass() {
     return byte[].class;
   }
 
   @Override
-  @PublicEvolving
   public boolean isKeyType() {
     return true;
   }
 
   @Override
-  @PublicEvolving
   public TypeSerializer<byte[]> createSerializer(ExecutionConfig executionConfig) {
     return new EncodedValueSerializer();
   }
@@ -98,7 +90,6 @@ public class EncodedValueTypeInformation
   }
 
   @Override
-  @PublicEvolving
   public TypeComparator<byte[]> createComparator(
       boolean sortOrderAscending,
       ExecutionConfig executionConfig) {


[26/50] [abbrv] beam git commit: Remove reference to the isStreaming flag

Posted by ke...@apache.org.
Remove reference to the isStreaming flag

Remove references to DataflowPipelineOptions in PipelineOptions javadoc.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/4ae4e7dc
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/4ae4e7dc
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/4ae4e7dc

Branch: refs/heads/jstorm-runner
Commit: 4ae4e7dc196d526aed3c64128db74f716f7bf38f
Parents: 8302783
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 11:42:33 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 13:06:59 2017 -0700

----------------------------------------------------------------------
 .../org/apache/beam/sdk/options/PipelineOptions.java    | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/4ae4e7dc/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
index 47d5aa9..4e7bc89 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java
@@ -79,18 +79,18 @@ import org.joda.time.format.DateTimeFormatter;
  *     PipelineOptionsFactory.as(DirectOptions.class);
  *
  * // To cast from one type to another using the as(Class) method:
- * DataflowPipelineOptions dataflowPipelineOptions =
- *     directPipelineOptions.as(DataflowPipelineOptions.class);
+ * ApplicationNameOptions applicationNameOptions =
+ *     directPipelineOptions.as(ApplicationNameOptions.class);
  *
  * // Options for the same property are shared between types
- * // The statement below will print out "true"
- * System.out.println(dataflowPipelineOptions.isStreaming());
+ * // The statement below will print out the name of the enclosing class by default
+ * System.out.println(applicationNameOptions.getApplicationName());
  *
  * // Prints out registered options.
  * PipelineOptionsFactory.printHelp(System.out);
  *
- * // Prints out options which are available to be set on DataflowPipelineOptions
- * PipelineOptionsFactory.printHelp(System.out, DataflowPipelineOptions.class);
+ * // Prints out options which are available to be set on ApplicationNameOptions
+ * PipelineOptionsFactory.printHelp(System.out, ApplicationNameOptions.class);
  * }</pre>
  *
  * <h2>Defining Your Own PipelineOptions</h2>
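
A minimal usage sketch of the updated javadoc example, assuming only the core-SDK ApplicationNameOptions interface (the name "MyPipeline" is illustrative):

    PipelineOptions options = PipelineOptionsFactory.create();
    // Views produced by as(Class) share one underlying set of properties.
    ApplicationNameOptions nameOptions = options.as(ApplicationNameOptions.class);
    nameOptions.setApplicationName("MyPipeline");
    // Reading through any interface that declares the property sees the value.
    System.out.println(options.as(ApplicationNameOptions.class).getApplicationName());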


[32/50] [abbrv] beam git commit: This closes #2536

Posted by ke...@apache.org.
This closes #2536


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/4ff244d4
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/4ff244d4
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/4ff244d4

Branch: refs/heads/jstorm-runner
Commit: 4ff244d49c678f658f437600559cbe0a99048f81
Parents: 85cfd0c 1f66fbd
Author: chamikara@google.com <ch...@google.com>
Authored: Mon Apr 17 15:02:41 2017 -0700
Committer: chamikara@google.com <ch...@google.com>
Committed: Mon Apr 17 15:02:41 2017 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/io/avroio_test.py       | 14 ++--
 .../python/apache_beam/io/concat_source_test.py | 12 ++--
 sdks/python/apache_beam/io/source_test_utils.py | 72 ++++++++++----------
 .../apache_beam/io/source_test_utils_test.py    | 20 +++---
 sdks/python/apache_beam/io/textio_test.py       | 18 ++---
 .../apache_beam/transforms/create_test.py       | 18 ++---
 6 files changed, 76 insertions(+), 78 deletions(-)
----------------------------------------------------------------------



[02/50] [abbrv] beam git commit: This closes #2539

Posted by ke...@apache.org.
This closes #2539


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/89ff0b14
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/89ff0b14
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/89ff0b14

Branch: refs/heads/jstorm-runner
Commit: 89ff0b145fac64ec6f5a38d8df708d226890c7ac
Parents: cf9ac45 bd79f4d
Author: Ahmet Altay <al...@altay-macbookpro2.roam.corp.google.com>
Authored: Fri Apr 14 13:06:32 2017 -0700
Committer: Ahmet Altay <al...@altay-macbookpro2.roam.corp.google.com>
Committed: Fri Apr 14 13:06:32 2017 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/io/concat_source.py     | 74 +++++++++-----------
 .../apache_beam/io/filebasedsource_test.py      |  2 +-
 sdks/python/apache_beam/io/fileio.py            |  6 +-
 sdks/python/apache_beam/io/filesystems_util.py  |  3 +-
 sdks/python/apache_beam/io/gcp/bigquery.py      | 13 ++--
 .../io/gcp/datastore/v1/datastoreio.py          |  4 +-
 .../apache_beam/io/gcp/datastore/v1/helper.py   | 16 ++---
 .../io/gcp/datastore/v1/query_splitter.py       |  2 +-
 sdks/python/apache_beam/io/gcp/gcsfilesystem.py |  3 +-
 .../io/gcp/tests/bigquery_matcher.py            |  3 +-
 sdks/python/apache_beam/io/iobase.py            |  7 +-
 sdks/python/apache_beam/io/localfilesystem.py   |  3 +-
 sdks/python/apache_beam/io/range_trackers.py    | 19 +++--
 sdks/python/apache_beam/io/source_test_utils.py |  7 +-
 sdks/python/apache_beam/io/textio.py            | 13 ++--
 sdks/python/apache_beam/metrics/cells.py        | 28 ++++----
 sdks/python/apache_beam/metrics/execution.py    |  3 +-
 sdks/python/apache_beam/metrics/metric.py       |  9 +--
 sdks/python/apache_beam/runners/common.py       |  9 +--
 .../runners/dataflow/dataflow_metrics_test.py   |  3 +-
 .../runners/dataflow/dataflow_runner.py         |  6 +-
 .../runners/dataflow/internal/apiclient.py      |  8 +--
 .../runners/dataflow/internal/dependency.py     |  6 +-
 .../runners/dataflow/test_dataflow_runner.py    |  4 --
 .../runners/direct/bundle_factory.py            | 14 ++--
 .../runners/direct/evaluation_context.py        | 10 +--
 .../apache_beam/runners/direct/executor.py      |  9 +--
 .../runners/direct/transform_evaluator.py       |  7 --
 sdks/python/apache_beam/runners/runner.py       |  3 +-
 .../apache_beam/tests/pipeline_verifiers.py     |  7 +-
 sdks/python/apache_beam/transforms/combiners.py | 58 +++++++--------
 .../apache_beam/transforms/combiners_test.py    |  4 +-
 sdks/python/apache_beam/typehints/decorators.py |  3 +-
 sdks/python/apache_beam/typehints/typehints.py  |  3 +-
 34 files changed, 149 insertions(+), 220 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] beam git commit: [BEAM-1964] Fix lint issues and pylint upgrade

Posted by ke...@apache.org.
[BEAM-1964] Fix lint issues and pylint upgrade


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/e03cc498
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/e03cc498
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/e03cc498

Branch: refs/heads/jstorm-runner
Commit: e03cc498652ea57eba706f563b0b676edf57f78c
Parents: f30d5b9
Author: Sourabh Bajaj <so...@google.com>
Authored: Fri Apr 14 15:54:51 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Fri Apr 14 16:43:50 2017 -0700

----------------------------------------------------------------------
 sdks/python/.pylintrc                                        | 5 ++++-
 .../python/apache_beam/examples/cookbook/group_with_coder.py | 6 ++++--
 sdks/python/apache_beam/internal/pickler.py                  | 8 ++++----
 .../apache_beam/io/gcp/datastore/v1/datastoreio_test.py      | 4 ++--
 sdks/python/apache_beam/io/textio.py                         | 2 +-
 sdks/python/apache_beam/transforms/combiners.py              | 4 ++--
 sdks/python/apache_beam/transforms/core.py                   | 2 +-
 sdks/python/apache_beam/transforms/ptransform.py             | 2 +-
 sdks/python/apache_beam/typehints/typehints_test.py          | 2 +-
 sdks/python/apache_beam/utils/retry.py                       | 2 +-
 sdks/python/tox.ini                                          | 2 +-
 11 files changed, 22 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/.pylintrc
----------------------------------------------------------------------
diff --git a/sdks/python/.pylintrc b/sdks/python/.pylintrc
index 7a0611a..e29e15b 100644
--- a/sdks/python/.pylintrc
+++ b/sdks/python/.pylintrc
@@ -94,6 +94,7 @@ disable =
   import-error,
   import-self,
   invalid-name,
+  invalid-unary-operand-type,
   locally-disabled,
   locally-enabled,
   misplaced-bare-raise,
@@ -104,6 +105,8 @@ disable =
   no-self-use,
   no-value-for-parameter,
   not-callable,
+  # Re-enable the context manager check once https://github.com/PyCQA/pylint/issues/782 is fixed
+  not-context-manager,
   pointless-statement,
   protected-access,
   raising-non-exception,
@@ -115,6 +118,7 @@ disable =
   similarities,
   simplifiable-if-statement,
   super-init-not-called,
+  super-on-old-class,
   undefined-variable,
   unexpected-keyword-arg,
   unidiomatic-typecheck,
@@ -124,7 +128,6 @@ disable =
   unused-wildcard-import,
   wildcard-import,
 
-
 [REPORTS]
 # Tells whether to display a full report or only the messages
 reports=no

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/examples/cookbook/group_with_coder.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/examples/cookbook/group_with_coder.py b/sdks/python/apache_beam/examples/cookbook/group_with_coder.py
index f6f2108..cb675bd 100644
--- a/sdks/python/apache_beam/examples/cookbook/group_with_coder.py
+++ b/sdks/python/apache_beam/examples/cookbook/group_with_coder.py
@@ -78,9 +78,11 @@ def get_players(descriptor):
   return Player(name), int(points)
 
 
-def run(argv=sys.argv[1:]):
+def run(args=None):
   """Runs the workflow computing total points from a collection of matches."""
 
+  if args is None:
+    args = sys.argv[1:]
   parser = argparse.ArgumentParser()
   parser.add_argument('--input',
                       required=True,
@@ -88,7 +90,7 @@ def run(argv=sys.argv[1:]):
   parser.add_argument('--output',
                       required=True,
                       help='Output file to write results to.')
-  known_args, pipeline_args = parser.parse_known_args(argv)
+  known_args, pipeline_args = parser.parse_known_args(args)
   # We use the save_main_session option because one or more DoFn's in this
   # workflow rely on global context (e.g., a module imported at module level).
   pipeline_options = PipelineOptions(pipeline_args)

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/internal/pickler.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/internal/pickler.py b/sdks/python/apache_beam/internal/pickler.py
index a4ab7b9..3f3f657 100644
--- a/sdks/python/apache_beam/internal/pickler.py
+++ b/sdks/python/apache_beam/internal/pickler.py
@@ -184,12 +184,12 @@ logging.getLogger('dill').setLevel(logging.WARN)
 def dumps(o, enable_trace=True):
   try:
     s = dill.dumps(o)
-  except Exception as e:      # pylint: disable=broad-except
+  except Exception:      # pylint: disable=broad-except
     if enable_trace:
       dill.dill._trace(True)  # pylint: disable=protected-access
       s = dill.dumps(o)
     else:
-      raise e
+      raise
   finally:
     dill.dill._trace(False)  # pylint: disable=protected-access
 
@@ -210,12 +210,12 @@ def loads(encoded, enable_trace=True):
 
   try:
     return dill.loads(s)
-  except Exception as e:          # pylint: disable=broad-except
+  except Exception:          # pylint: disable=broad-except
     if enable_trace:
       dill.dill._trace(True)   # pylint: disable=protected-access
       return dill.loads(s)
     else:
-      raise e
+      raise
   finally:
     dill.dill._trace(False)  # pylint: disable=protected-access
 

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio_test.py b/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio_test.py
index 3121d3a..8eed0f8 100644
--- a/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio_test.py
+++ b/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio_test.py
@@ -27,7 +27,7 @@ from apache_beam.io.gcp.datastore.v1.datastoreio import ReadFromDatastore
 from apache_beam.io.gcp.datastore.v1.datastoreio import WriteToDatastore
 
 # Protect against environments where datastore library is not available.
-# pylint: disable=wrong-import-order, wrong-import-position
+# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
 try:
   from google.cloud.proto.datastore.v1 import datastore_pb2
   from google.cloud.proto.datastore.v1 import query_pb2
@@ -35,7 +35,7 @@ try:
   from googledatastore import helper as datastore_helper
 except ImportError:
   datastore_pb2 = None
-# pylint: enable=wrong-import-order, wrong-import-position
+# pylint: enable=wrong-import-order, wrong-import-position, ungrouped-imports
 
 
 @unittest.skipIf(datastore_pb2 is None, 'GCP dependencies are not installed')

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/io/textio.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/textio.py b/sdks/python/apache_beam/io/textio.py
index b6a24b0..f2c3d34 100644
--- a/sdks/python/apache_beam/io/textio.py
+++ b/sdks/python/apache_beam/io/textio.py
@@ -160,7 +160,7 @@ class _TextSource(filebasedsource.FileBasedSource):
         # followed by a new line character. Since such a record is at the last
         # position of a file, it should not be a part of the considered range.
         # We do this check to ignore such records.
-        if len(record) == 0 and num_bytes_to_next_record < 0:
+        if len(record) == 0 and num_bytes_to_next_record < 0:  # pylint: disable=len-as-condition
           break
 
         # Record separator must be larger than zero bytes.

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/transforms/combiners.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/combiners.py b/sdks/python/apache_beam/transforms/combiners.py
index f812832..fa0742d 100644
--- a/sdks/python/apache_beam/transforms/combiners.py
+++ b/sdks/python/apache_beam/transforms/combiners.py
@@ -463,7 +463,7 @@ class SingleInputTupleCombineFn(_TupleCombineFnBase):
 class ToList(ptransform.PTransform):
   """A global CombineFn that condenses a PCollection into a single list."""
 
-  def __init__(self, label='ToList'):
+  def __init__(self, label='ToList'):  # pylint: disable=useless-super-delegation
     super(ToList, self).__init__(label)
 
   def expand(self, pcoll):
@@ -497,7 +497,7 @@ class ToDict(ptransform.PTransform):
   will be present in the resulting dict.
   """
 
-  def __init__(self, label='ToDict'):
+  def __init__(self, label='ToDict'):  # pylint: disable=useless-super-delegation
     super(ToDict, self).__init__(label)
 
   def expand(self, pcoll):

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/transforms/core.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/core.py b/sdks/python/apache_beam/transforms/core.py
index b1a33ea..3def9ef 100644
--- a/sdks/python/apache_beam/transforms/core.py
+++ b/sdks/python/apache_beam/transforms/core.py
@@ -1172,7 +1172,7 @@ class Windowing(object):
 
   def __init__(self, windowfn, triggerfn=None, accumulation_mode=None,
                output_time_fn=None):
-    global AccumulationMode, DefaultTrigger
+    global AccumulationMode, DefaultTrigger  # pylint: disable=global-variable-not-assigned
     # pylint: disable=wrong-import-order, wrong-import-position
     from apache_beam.transforms.trigger import AccumulationMode, DefaultTrigger
     # pylint: enable=wrong-import-order, wrong-import-position

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/transforms/ptransform.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/ptransform.py b/sdks/python/apache_beam/transforms/ptransform.py
index 0ac8b5b..9b7a37f 100644
--- a/sdks/python/apache_beam/transforms/ptransform.py
+++ b/sdks/python/apache_beam/transforms/ptransform.py
@@ -656,7 +656,7 @@ class _NamedPTransform(PTransform):
     super(_NamedPTransform, self).__init__(label)
     self.transform = transform
 
-  def __ror__(self, pvalueish):
+  def __ror__(self, pvalueish, _unused=None):
     return self.transform.__ror__(pvalueish, self.label)
 
   def expand(self, pvalue):

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/typehints/typehints_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/typehints/typehints_test.py b/sdks/python/apache_beam/typehints/typehints_test.py
index 4e82fbc..8ebe3e4 100644
--- a/sdks/python/apache_beam/typehints/typehints_test.py
+++ b/sdks/python/apache_beam/typehints/typehints_test.py
@@ -49,7 +49,7 @@ def check_type_hints(f):
   @functools.wraps(f)
   def wrapper(*args, **kwargs):
     hints = get_type_hints(f)
-    if hints.input_types:
+    if hints.input_types:  # pylint: disable=too-many-nested-blocks
       input_hints = getcallargs_forhints(
           f, *hints.input_types[0], **hints.input_types[1])
       inputs = inspect.getcallargs(f, *args, **kwargs)

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/apache_beam/utils/retry.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/utils/retry.py b/sdks/python/apache_beam/utils/retry.py
index 4b137e2..2c32f0f 100644
--- a/sdks/python/apache_beam/utils/retry.py
+++ b/sdks/python/apache_beam/utils/retry.py
@@ -176,7 +176,7 @@ def with_exponential_backoff(
               sleep_interval = retry_intervals.next()
             except StopIteration:
               # Re-raise the original exception since we finished the retries.
-              raise exn, None, exn_traceback
+              raise exn, None, exn_traceback  # pylint: disable=raising-bad-type
 
             logger(
                 'Retry with exponential backoff: waiting for %s seconds before '

http://git-wip-us.apache.org/repos/asf/beam/blob/e03cc498/sdks/python/tox.ini
----------------------------------------------------------------------
diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini
index 63e197d..6660919 100644
--- a/sdks/python/tox.ini
+++ b/sdks/python/tox.ini
@@ -73,7 +73,7 @@ passenv = TRAVIS*
 deps=
   nose==1.3.7
   pep8==1.7.0
-  pylint==1.6.5
+  pylint==1.7.0
 commands =
   pip install -e .[test]
   {toxinidir}/run_pylint.sh


[24/50] [abbrv] beam git commit: This closes #2533

Posted by ke...@apache.org.
This closes #2533


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/8302783c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/8302783c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/8302783c

Branch: refs/heads/jstorm-runner
Commit: 8302783c3239ddcf8b5259b4aff8ee33673d02cd
Parents: 14e5cd2 f83a7d5
Author: Eugene Kirpichov <ki...@google.com>
Authored: Mon Apr 17 09:58:58 2017 -0700
Committer: Eugene Kirpichov <ki...@google.com>
Committed: Mon Apr 17 09:58:58 2017 -0700

----------------------------------------------------------------------
 .../beam/sdk/transforms/FlatMapElements.java    |  4 ++--
 .../apache/beam/sdk/transforms/MapElements.java |  4 ++--
 .../org/apache/beam/sdk/transforms/ParDo.java   |  5 ++--
 .../apache/beam/sdk/values/TypeDescriptors.java | 25 ++++++++++----------
 4 files changed, 18 insertions(+), 20 deletions(-)
----------------------------------------------------------------------



[16/50] [abbrv] beam git commit: This closes #2406

Posted by ke...@apache.org.
This closes #2406


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/9b8f2309
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/9b8f2309
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/9b8f2309

Branch: refs/heads/jstorm-runner
Commit: 9b8f23095665cc7d7872969290a01847de8071b6
Parents: f7d727c c62d698
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Mon Apr 17 11:03:51 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 11:03:51 2017 +0200

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/io/UnboundedSource.java   | 13 +++++++++++++
 1 file changed, 13 insertions(+)
----------------------------------------------------------------------



[09/50] [abbrv] beam git commit: This closes #2501

Posted by ke...@apache.org.
This closes #2501


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/fdbadfc9
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/fdbadfc9
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/fdbadfc9

Branch: refs/heads/jstorm-runner
Commit: fdbadfc9cceecf645f2325777ab32f1cb3041953
Parents: 3c2b855 f3b4960
Author: Thomas Groh <tg...@google.com>
Authored: Fri Apr 14 16:52:03 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Apr 14 16:52:03 2017 -0700

----------------------------------------------------------------------
 .../apache/beam/runners/apex/ApexRunner.java    |  32 +++--
 .../DeduplicatedFlattenFactory.java             |  63 +++++----
 .../EmptyFlattenAsCreateFactory.java            |  20 ++-
 .../core/construction/PTransformMatchers.java   |   2 -
 .../construction/PTransformReplacements.java    |  69 ++++++++++
 .../core/construction/PrimitiveCreate.java      |  13 +-
 .../SingleInputOutputOverrideFactory.java       |   9 +-
 .../UnsupportedOverrideFactory.java             |  14 +-
 .../DeduplicatedFlattenFactoryTest.java         |  18 +--
 .../EmptyFlattenAsCreateFactoryTest.java        |  36 ++++-
 .../PTransformReplacementsTest.java             | 131 +++++++++++++++++++
 .../SingleInputOutputOverrideFactoryTest.java   |  31 ++---
 .../UnsupportedOverrideFactoryTest.java         |  11 +-
 ...ectGBKIntoKeyedWorkItemsOverrideFactory.java |  16 ++-
 .../direct/DirectGroupByKeyOverrideFactory.java |  14 +-
 .../direct/ParDoMultiOverrideFactory.java       |  22 ++--
 .../direct/TestStreamEvaluatorFactory.java      |  14 +-
 .../runners/direct/ViewOverrideFactory.java     |  18 +--
 .../direct/WriteWithShardingFactory.java        |  16 +--
 .../DirectGroupByKeyOverrideFactoryTest.java    |  12 +-
 .../direct/ParDoMultiOverrideFactoryTest.java   |  45 -------
 .../direct/TestStreamEvaluatorFactoryTest.java  |  12 --
 .../runners/direct/ViewOverrideFactoryTest.java |  42 ++++--
 .../direct/WriteWithShardingFactoryTest.java    |  23 ++--
 .../flink/FlinkStreamingPipelineTranslator.java |  56 ++++----
 .../dataflow/BatchStatefulParDoOverrides.java   |  42 +++---
 .../runners/dataflow/BatchViewOverrides.java    |  17 ++-
 .../beam/runners/dataflow/DataflowRunner.java   |  92 ++++++-------
 .../dataflow/PrimitiveParDoSingleFactory.java   |  15 ++-
 .../dataflow/ReshuffleOverrideFactory.java      |  12 +-
 .../dataflow/StreamingViewOverrides.java        |  14 +-
 .../PrimitiveParDoSingleFactoryTest.java        |  59 +++++++--
 .../beam/runners/spark/TestSparkRunner.java     |  14 +-
 .../main/java/org/apache/beam/sdk/Pipeline.java |  15 ++-
 .../sdk/runners/PTransformOverrideFactory.java  |  33 +++--
 .../beam/sdk/transforms/AppliedPTransform.java  |   5 +
 .../java/org/apache/beam/sdk/PipelineTest.java  |  33 ++---
 37 files changed, 675 insertions(+), 415 deletions(-)
----------------------------------------------------------------------



[08/50] [abbrv] beam git commit: This closes #2542

Posted by ke...@apache.org.
This closes #2542


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/3c2b855f
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/3c2b855f
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/3c2b855f

Branch: refs/heads/jstorm-runner
Commit: 3c2b855f5aa8a5e93076102e76ea21f9e36a96c7
Parents: f30d5b9 e03cc49
Author: Ahmet Altay <al...@google.com>
Authored: Fri Apr 14 16:43:52 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Fri Apr 14 16:43:52 2017 -0700

----------------------------------------------------------------------
 sdks/python/.pylintrc                                        | 5 ++++-
 .../python/apache_beam/examples/cookbook/group_with_coder.py | 6 ++++--
 sdks/python/apache_beam/internal/pickler.py                  | 8 ++++----
 .../apache_beam/io/gcp/datastore/v1/datastoreio_test.py      | 4 ++--
 sdks/python/apache_beam/io/textio.py                         | 2 +-
 sdks/python/apache_beam/transforms/combiners.py              | 4 ++--
 sdks/python/apache_beam/transforms/core.py                   | 2 +-
 sdks/python/apache_beam/transforms/ptransform.py             | 2 +-
 sdks/python/apache_beam/typehints/typehints_test.py          | 2 +-
 sdks/python/apache_beam/utils/retry.py                       | 2 +-
 sdks/python/tox.ini                                          | 2 +-
 11 files changed, 22 insertions(+), 17 deletions(-)
----------------------------------------------------------------------



[29/50] [abbrv] beam git commit: Add no-else-return to pylintrc

Posted by ke...@apache.org.
Add no-else-return to pylintrc


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/efa82fab
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/efa82fab
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/efa82fab

Branch: refs/heads/jstorm-runner
Commit: efa82fab185802a297a5b2f42919615b0c87d8f2
Parents: 32a576a
Author: Sourabh Bajaj <so...@google.com>
Authored: Mon Apr 17 11:16:07 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Mon Apr 17 14:34:09 2017 -0700

----------------------------------------------------------------------
 sdks/python/.pylintrc | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/efa82fab/sdks/python/.pylintrc
----------------------------------------------------------------------
diff --git a/sdks/python/.pylintrc b/sdks/python/.pylintrc
index e29e15b..429ebdb 100644
--- a/sdks/python/.pylintrc
+++ b/sdks/python/.pylintrc
@@ -100,6 +100,7 @@ disable =
   misplaced-bare-raise,
   missing-docstring,
   multiple-statements,
+  no-else-return,
   no-member,
   no-name-in-module,
   no-self-use,

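For reference, a small illustration of the check being disabled here:
no-else-return fires when an else follows an if branch that returns
(hypothetical functions, not from the Beam codebase).

# Flagged by no-else-return under pylint 1.7:
def clamp_flagged(value, limit):
  if value > limit:
    return limit
  else:
    return value

# The flattened form the check prefers; this commit disables the check
# rather than rewriting every such occurrence:
def clamp_preferred(value, limit):
  if value > limit:
    return limit
  return value
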

[04/50] [abbrv] beam git commit: Rename DoFn.Context#sideOutput to output

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
index 211dfd9..f752b1c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java
@@ -55,12 +55,12 @@ public class TypedPValueTest {
   }
 
   private PCollectionTuple buildPCollectionTupleWithTags(
-      TupleTag<Integer> mainOutputTag, TupleTag<Integer> sideOutputTag) {
+      TupleTag<Integer> mainOutputTag, TupleTag<Integer> additionalOutputTag) {
     PCollection<Integer> input = p.apply(Create.of(1, 2, 3));
     PCollectionTuple tuple = input.apply(
         ParDo
         .of(new IdentityDoFn())
-        .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+        .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
     return tuple;
   }
 
@@ -69,11 +69,11 @@ public class TypedPValueTest {
   }
 
   @Test
-  public void testUntypedSideOutputTupleTagGivesActionableMessage() {
+  public void testUntypedOutputTupleTagGivesActionableMessage() {
     TupleTag<Integer> mainOutputTag = new TupleTag<Integer>() {};
-    // untypedSideOutputTag did not use anonymous subclass.
-    TupleTag<Integer> untypedSideOutputTag = new TupleTag<Integer>();
-    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, untypedSideOutputTag);
+    // untypedOutputTag did not use anonymous subclass.
+    TupleTag<Integer> untypedOutputTag = new TupleTag<Integer>();
+    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, untypedOutputTag);
 
     thrown.expect(IllegalStateException.class);
     thrown.expectMessage("No Coder has been manually specified");
@@ -84,15 +84,15 @@ public class TypedPValueTest {
     thrown.expectMessage(
         containsString("Building a Coder from the fallback CoderProvider failed"));
 
-    tuple.get(untypedSideOutputTag).getCoder();
+    tuple.get(untypedOutputTag).getCoder();
   }
 
   @Test
-  public void testStaticFactorySideOutputTupleTagGivesActionableMessage() {
+  public void testStaticFactoryOutputTupleTagGivesActionableMessage() {
     TupleTag<Integer> mainOutputTag = new TupleTag<Integer>() {};
-    // untypedSideOutputTag constructed from a static factory method.
-    TupleTag<Integer> untypedSideOutputTag = makeTagStatically();
-    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, untypedSideOutputTag);
+    // untypedOutputTag constructed from a static factory method.
+    TupleTag<Integer> untypedOutputTag = makeTagStatically();
+    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, untypedOutputTag);
 
     thrown.expect(IllegalStateException.class);
     thrown.expectMessage("No Coder has been manually specified");
@@ -103,27 +103,27 @@ public class TypedPValueTest {
     thrown.expectMessage(
         containsString("Building a Coder from the fallback CoderProvider failed"));
 
-    tuple.get(untypedSideOutputTag).getCoder();
+    tuple.get(untypedOutputTag).getCoder();
   }
 
   @Test
-  public void testTypedSideOutputTupleTag() {
+  public void testTypedOutputTupleTag() {
     TupleTag<Integer> mainOutputTag = new TupleTag<Integer>() {};
-    // typedSideOutputTag was constructed with compile-time type information.
-    TupleTag<Integer> typedSideOutputTag = new TupleTag<Integer>() {};
-    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, typedSideOutputTag);
+    // typedOutputTag was constructed with compile-time type information.
+    TupleTag<Integer> typedOutputTag = new TupleTag<Integer>() {};
+    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, typedOutputTag);
 
-    assertThat(tuple.get(typedSideOutputTag).getCoder(), instanceOf(VarIntCoder.class));
+    assertThat(tuple.get(typedOutputTag).getCoder(), instanceOf(VarIntCoder.class));
   }
 
   @Test
-  public void testUntypedMainOutputTagTypedSideOutputTupleTag() {
+  public void testUntypedMainOutputTagTypedOutputTupleTag() {
     // mainOutputTag is allowed to be untyped because Coder can be inferred other ways.
     TupleTag<Integer> mainOutputTag = new TupleTag<>();
-    TupleTag<Integer> typedSideOutputTag = new TupleTag<Integer>() {};
-    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, typedSideOutputTag);
+    TupleTag<Integer> typedOutputTag = new TupleTag<Integer>() {};
+    PCollectionTuple tuple = buildPCollectionTupleWithTags(mainOutputTag, typedOutputTag);
 
-    assertThat(tuple.get(typedSideOutputTag).getCoder(), instanceOf(VarIntCoder.class));
+    assertThat(tuple.get(typedOutputTag).getCoder(), instanceOf(VarIntCoder.class));
   }
 
   // A simple class for which there should be no obvious Coder.
@@ -139,13 +139,13 @@ public class TypedPValueTest {
   }
 
   @Test
-  public void testParDoWithNoSideOutputsErrorDoesNotMentionTupleTag() {
+  public void testParDoWithNoOutputsErrorDoesNotMentionTupleTag() {
     PCollection<EmptyClass> input =
         p.apply(Create.of(1, 2, 3)).apply(ParDo.of(new EmptyClassDoFn()));
 
     thrown.expect(IllegalStateException.class);
 
-    // Output specific to ParDo TupleTag side outputs should not be present.
+    // Output specific to ParDo additional TupleTag outputs should not be present.
     thrown.expectMessage(not(containsString("erasure")));
     thrown.expectMessage(not(containsString("see TupleTag Javadoc")));
     // Instead, expect output suggesting other possible fixes.

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/fake/FakeStepContext.java
----------------------------------------------------------------------
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/fake/FakeStepContext.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/fake/FakeStepContext.java
index 6403e96..9714d72 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/fake/FakeStepContext.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/fake/FakeStepContext.java
@@ -46,7 +46,7 @@ public class FakeStepContext implements StepContext {
   }
 
   @Override
-  public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {
+  public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
index de105d7..bd2fba9 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
@@ -300,7 +300,7 @@ public class ProcessBundleHandlerTest {
 
   private static class TestDoFn extends DoFn<String, String> {
     private static final TupleTag<String> mainOutput = new TupleTag<>("mainOutput");
-    private static final TupleTag<String> sideOutput = new TupleTag<>("sideOutput");
+    private static final TupleTag<String> additionalOutput = new TupleTag<>("output");
 
     @StartBundle
     public void startBundle(Context context) {
@@ -310,7 +310,7 @@ public class ProcessBundleHandlerTest {
     @ProcessElement
     public void processElement(ProcessContext context) {
       context.output("MainOutput" + context.element());
-      context.sideOutput(sideOutput, "SideOutput" + context.element());
+      context.output(additionalOutput, "AdditionalOutput" + context.element());
     }
 
     @FinishBundle
@@ -321,7 +321,7 @@ public class ProcessBundleHandlerTest {
 
   /**
    * Create a DoFn that has 3 inputs (inputATarget1, inputATarget2, inputBTarget) and 2 outputs
-   * (mainOutput, sideOutput). Validate that inputs are fed to the {@link DoFn} and that outputs
+   * (mainOutput, output). Validate that inputs are fed to the {@link DoFn} and that outputs
    * are directed to the correct consumers.
    */
   @Test
@@ -329,7 +329,7 @@ public class ProcessBundleHandlerTest {
     Map<String, Message> fnApiRegistry = ImmutableMap.of(STRING_CODER_SPEC_ID, STRING_CODER_SPEC);
     String primitiveTransformId = "100L";
     long mainOutputId = 101L;
-    long sideOutputId = 102L;
+    long additionalOutputId = 102L;
 
     DoFnInfo<?, ?> doFnInfo = DoFnInfo.forFn(
         new TestDoFn(),
@@ -339,7 +339,7 @@ public class ProcessBundleHandlerTest {
         mainOutputId,
         ImmutableMap.of(
             mainOutputId, TestDoFn.mainOutput,
-            sideOutputId, TestDoFn.sideOutput));
+            additionalOutputId, TestDoFn.additionalOutput));
     BeamFnApi.FunctionSpec functionSpec = BeamFnApi.FunctionSpec.newBuilder()
         .setId("1L")
         .setUrn(JAVA_DO_FN_URN)
@@ -372,25 +372,25 @@ public class ProcessBundleHandlerTest {
         .putOutputs(Long.toString(mainOutputId), BeamFnApi.PCollection.newBuilder()
             .setCoderReference(STRING_CODER_SPEC_ID)
             .build())
-        .putOutputs(Long.toString(sideOutputId), BeamFnApi.PCollection.newBuilder()
+        .putOutputs(Long.toString(additionalOutputId), BeamFnApi.PCollection.newBuilder()
             .setCoderReference(STRING_CODER_SPEC_ID)
             .build())
         .build();
 
     List<WindowedValue<String>> mainOutputValues = new ArrayList<>();
-    List<WindowedValue<String>> sideOutputValues = new ArrayList<>();
+    List<WindowedValue<String>> additionalOutputValues = new ArrayList<>();
     BeamFnApi.Target mainOutputTarget = BeamFnApi.Target.newBuilder()
         .setPrimitiveTransformReference(primitiveTransformId)
         .setName(Long.toString(mainOutputId))
         .build();
-    BeamFnApi.Target sideOutputTarget = BeamFnApi.Target.newBuilder()
+    BeamFnApi.Target additionalOutputTarget = BeamFnApi.Target.newBuilder()
         .setPrimitiveTransformReference(primitiveTransformId)
-        .setName(Long.toString(sideOutputId))
+        .setName(Long.toString(additionalOutputId))
         .build();
     Multimap<BeamFnApi.Target, ThrowingConsumer<WindowedValue<String>>> existingConsumers =
         ImmutableMultimap.of(
             mainOutputTarget, mainOutputValues::add,
-            sideOutputTarget, sideOutputValues::add);
+            additionalOutputTarget, additionalOutputValues::add);
     Multimap<BeamFnApi.Target, ThrowingConsumer<WindowedValue<String>>> newConsumers =
         HashMultimap.create();
     List<ThrowingRunnable> startFunctions = new ArrayList<>();
@@ -422,12 +422,12 @@ public class ProcessBundleHandlerTest {
         valueInGlobalWindow("MainOutputA1"),
         valueInGlobalWindow("MainOutputA2"),
         valueInGlobalWindow("MainOutputB")));
-    assertThat(sideOutputValues, contains(
-        valueInGlobalWindow("SideOutputA1"),
-        valueInGlobalWindow("SideOutputA2"),
-        valueInGlobalWindow("SideOutputB")));
+    assertThat(additionalOutputValues, contains(
+        valueInGlobalWindow("AdditionalOutputA1"),
+        valueInGlobalWindow("AdditionalOutputA2"),
+        valueInGlobalWindow("AdditionalOutputB")));
     mainOutputValues.clear();
-    sideOutputValues.clear();
+    additionalOutputValues.clear();
 
     Iterables.getOnlyElement(finishFunctions).run();
     assertThat(mainOutputValues, contains(valueInGlobalWindow("FinishBundle")));

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WritePartition.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WritePartition.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WritePartition.java
index f4bf198..1b6492e 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WritePartition.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WritePartition.java
@@ -61,7 +61,7 @@ class WritePartition extends DoFn<String, KV<Long, List<String>>> {
       KV<String, Long> fileResult = results.get(i);
       if (currNumFiles + 1 > Write.MAX_NUM_FILES
           || currSizeBytes + fileResult.getValue() > Write.MAX_SIZE_BYTES) {
-        c.sideOutput(multiPartitionsTag, KV.of(++partitionId, currResults));
+        c.output(multiPartitionsTag, KV.of(++partitionId, currResults));
         currResults = Lists.newArrayList();
         currNumFiles = 0;
         currSizeBytes = 0;
@@ -71,9 +71,9 @@ class WritePartition extends DoFn<String, KV<Long, List<String>>> {
       currResults.add(fileResult.getKey());
     }
     if (partitionId == 0) {
-      c.sideOutput(singlePartitionTag, KV.of(++partitionId, currResults));
+      c.output(singlePartitionTag, KV.of(++partitionId, currResults));
     } else {
-      c.sideOutput(multiPartitionsTag, KV.of(++partitionId, currResults));
+      c.output(multiPartitionsTag, KV.of(++partitionId, currResults));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
index 92ab204..2a2bf91 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java
@@ -2107,9 +2107,9 @@ public class BigQueryIOTest implements Serializable {
 
     List<KV<Long, List<String>>> partitions;
     if (expectedNumPartitions > 1) {
-      partitions = tester.takeSideOutputElements(multiPartitionsTag);
+      partitions = tester.takeOutputElements(multiPartitionsTag);
     } else {
-      partitions = tester.takeSideOutputElements(singlePartitionTag);
+      partitions = tester.takeOutputElements(singlePartitionTag);
     }
     List<Long> partitionIds = Lists.newArrayList();
     List<String> partitionFileNames = Lists.newArrayList();

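The hunks in this commit are all Java; for comparison, a sketch of the
analogous multi-output pattern in the Beam Python SDK (using the
present-day pvalue.TaggedOutput naming, which is an assumption here and
not part of this commit):

import apache_beam as beam
from apache_beam import pvalue

class SplitEvenOdd(beam.DoFn):
  def process(self, element):
    if element % 2 == 0:
      yield element  # main output
    else:
      # Additional tagged output, the analogue of output(tag, value).
      yield pvalue.TaggedOutput('odd', element)

with beam.Pipeline() as p:
  results = (
      p
      | beam.Create([1, 2, 3, 4])
      | beam.ParDo(SplitEvenOdd()).with_outputs('odd', main='even'))
  evens = results.even  # main output PCollection
  odds = results.odd    # tagged output PCollection
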

[23/50] [abbrv] beam git commit: Javadoc fixups after style guide changes

Posted by ke...@apache.org.
Javadoc fixups after style guide changes


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f83a7d58
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f83a7d58
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f83a7d58

Branch: refs/heads/jstorm-runner
Commit: f83a7d58f90db0a92b810dadbf85fe5b1eb2f057
Parents: 14e5cd2
Author: Eugene Kirpichov <ki...@google.com>
Authored: Thu Apr 13 16:55:12 2017 -0700
Committer: Eugene Kirpichov <ki...@google.com>
Committed: Mon Apr 17 09:58:47 2017 -0700

----------------------------------------------------------------------
 .../beam/sdk/transforms/FlatMapElements.java    |  4 ++--
 .../apache/beam/sdk/transforms/MapElements.java |  4 ++--
 .../org/apache/beam/sdk/transforms/ParDo.java   |  5 ++--
 .../apache/beam/sdk/values/TypeDescriptors.java | 25 ++++++++++----------
 4 files changed, 18 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/f83a7d58/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
index 0983165..a8a94f9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java
@@ -103,8 +103,8 @@ extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
    * <p>Example of use in Java 8:
    * <pre>{@code
    * PCollection<String> words = lines.apply(
-   *     FlatMapElements.via((String line) -> Arrays.asList(line.split(" ")))
-   *         .withOutputType(new TypeDescriptor<String>(){});
+   *     FlatMapElements.into(TypeDescriptors.strings())
+   *                    .via((String line) -> Arrays.asList(line.split(" ")))
    * }</pre>
    *
    * <p>In Java 7, the overload {@link #via(SimpleFunction)} is more concise as the output type

http://git-wip-us.apache.org/repos/asf/beam/blob/f83a7d58/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
index 82cf753..792a6d5 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapElements.java
@@ -95,8 +95,8 @@ extends PTransform<PCollection<? extends InputT>, PCollection<OutputT>> {
    *
    * <pre>{@code
    * PCollection<Integer> wordLengths = words.apply(
-   *     MapElements.via((String word) -> word.length())
-   *         .withOutputType(new TypeDescriptor<Integer>() {});
+   *     MapElements.into(TypeDescriptors.integers())
+   *                .via((String word) -> word.length()));
    * }</pre>
    *
    * <p>In Java 7, the overload {@link #via(SimpleFunction)} is more concise as the output type

http://git-wip-us.apache.org/repos/asf/beam/blob/f83a7d58/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index e3777ac..c0633b6 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -169,8 +169,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  * final PCollectionView<Integer> maxWordLengthCutOffView =
  *     maxWordLengthCutOff.apply(View.<Integer>asSingleton());
  * PCollection<String> wordsBelowCutOff =
- *     words.apply(ParDo.withSideInputs(maxWordLengthCutOffView)
- *                      .of(new DoFn<String, String>() {
+ *     words.apply(ParDo.of(new DoFn<String, String>() {
  *        {@literal @}ProcessElement
  *         public void processElement(ProcessContext c) {
  *           String word = c.element();
@@ -178,7 +177,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  *           if (word.length() <= lengthCutOff) {
  *             c.output(word);
  *           }
- *         }}));
+ *         }}).withSideInputs(maxWordLengthCutOffView));
  * }</pre>
  *
  * <h2>Additional Outputs</h2>

http://git-wip-us.apache.org/repos/asf/beam/blob/f83a7d58/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java
index b6b1a1a..a4626c9 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java
@@ -184,10 +184,9 @@ public class TypeDescriptors {
    * <pre>
    * {@code
    * PCollection<String> words = ...;
-   * PCollection<KV<String, String>> words = words.apply(
-   *            FlatMapElements.via(...)
-   *            .withOutputType(
-   *              TypeDescriptors.kv(TypeDescriptors.strings(), TypeDescriptors.strings())));
+   * PCollection<KV<String, String>> words = words.apply(FlatMapElements
+   *         .into(TypeDescriptors.kv(TypeDescriptors.strings(), TypeDescriptors.strings()))
+   *         .via(...));
    * }
    * </pre>
    * @param key The {@link TypeDescriptor} for the key
@@ -215,9 +214,9 @@ public class TypeDescriptors {
    * <pre>
    * {@code
    * PCollection<String> words = ...;
-   * PCollection<Set<String>> words = words.apply(
-   *            FlatMapElements.via(...)
-   *            .withOutputType(TypeDescriptors.sets(TypeDescriptors.strings())));
+   * PCollection<Set<String>> words = words.apply(FlatMapElements
+   *         .into(TypeDescriptors.sets(TypeDescriptors.strings()))
+   *         .via(...));
    * }
    * </pre>
    * @param element The {@link TypeDescriptor} for the set
@@ -243,9 +242,9 @@ public class TypeDescriptors {
    * <pre>
    * {@code
    * PCollection<String> words = ...;
-   * PCollection<List<String>> words = words.apply(
-   *            FlatMapElements.via(...)
-   *            .withOutputType(TypeDescriptors.lists(TypeDescriptors.strings())));
+   * PCollection<List<String>> words = words.apply(FlatMapElements
+   *         .into(TypeDescriptors.lists(TypeDescriptors.strings()))
+   *         .via(...));
    * }
    * </pre>
    * @param element The {@link TypeDescriptor} for the list
@@ -271,9 +270,9 @@ public class TypeDescriptors {
    * <pre>
    * {@code
    * PCollection<String> words = ...;
-   * PCollection<Iterable<String>> words = words.apply(
-   *            FlatMapElements.via(...)
-   *            .withOutputType(TypeDescriptors.iterables(TypeDescriptors.strings())));
+   * PCollection<Iterable<String>> words = words.apply(FlatMapElements
+   *         .into(TypeDescriptors.iterables(TypeDescriptors.strings()))
+   *         .via(...));
    * }
    * </pre>
    * @param iterable The {@link TypeDescriptor} for the iterable


[18/50] [abbrv] beam git commit: This closes #2550

Posted by ke...@apache.org.
This closes #2550


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/946778c5
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/946778c5
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/946778c5

Branch: refs/heads/jstorm-runner
Commit: 946778c5bb114814d1ba5ebe71a27bc960f9e59b
Parents: 9b8f230 7cf06f5
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Mon Apr 17 11:55:52 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 11:55:52 2017 +0200

----------------------------------------------------------------------
 .../org/apache/beam/runners/apex/ApexYarnLauncher.java |  2 --
 .../apache/beam/runners/direct/ModelEnforcement.java   | 13 ++++++-------
 .../beam/runners/direct/TransformEvaluatorFactory.java | 10 +++++-----
 .../src/main/java/org/apache/beam/sdk/io/AvroIO.java   | 13 ++++++-------
 .../src/main/java/org/apache/beam/sdk/io/Sink.java     |  2 +-
 .../main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java  |  2 --
 .../apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java    |  7 +------
 7 files changed, 19 insertions(+), 30 deletions(-)
----------------------------------------------------------------------



[12/50] [abbrv] beam git commit: [BEAM-1915] Removes use of OldDoFn from Apex

Posted by ke...@apache.org.
[BEAM-1915] Removes use of OldDoFn from Apex

This is the last occurrence of OldDoFn in the Beam repository
outside of OldDoFn itself.

OldDoFn is also used in the Dataflow worker, but it can be
deleted entirely once we (the Dataflow team) take care of that.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/3e243881
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/3e243881
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/3e243881

Branch: refs/heads/jstorm-runner
Commit: 3e243881fe767cf30869abf5c745c26f96d66fc4
Parents: fdbadfc
Author: Eugene Kirpichov <ki...@google.com>
Authored: Mon Apr 10 22:51:16 2017 -0700
Committer: Eugene Kirpichov <ki...@google.com>
Committed: Fri Apr 14 23:34:11 2017 -0700

----------------------------------------------------------------------
 .../operators/ApexGroupByKeyOperator.java       | 225 ++++++-------------
 1 file changed, 63 insertions(+), 162 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/3e243881/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
index 1697921..7d17ac6 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
@@ -42,32 +42,29 @@ import org.apache.beam.runners.apex.ApexPipelineOptions;
 import org.apache.beam.runners.apex.translation.utils.ApexStateInternals.ApexStateBackend;
 import org.apache.beam.runners.apex.translation.utils.ApexStreamTuple;
 import org.apache.beam.runners.apex.translation.utils.SerializablePipelineOptions;
-import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetDoFn;
-import org.apache.beam.runners.core.KeyedWorkItem;
-import org.apache.beam.runners.core.KeyedWorkItems;
-import org.apache.beam.runners.core.OldDoFn;
+import org.apache.beam.runners.core.OutputWindowedValue;
+import org.apache.beam.runners.core.ReduceFnRunner;
 import org.apache.beam.runners.core.StateInternals;
 import org.apache.beam.runners.core.StateInternalsFactory;
 import org.apache.beam.runners.core.StateNamespace;
 import org.apache.beam.runners.core.SystemReduceFn;
 import org.apache.beam.runners.core.TimerInternals;
-import org.apache.beam.runners.core.WindowingInternals;
+import org.apache.beam.runners.core.construction.Triggers;
+import org.apache.beam.runners.core.triggers.ExecutableTriggerStateMachine;
+import org.apache.beam.runners.core.triggers.TriggerStateMachines;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.CoderUtils;
+import org.apache.beam.sdk.util.NullSideInputReader;
 import org.apache.beam.sdk.util.TimeDomain;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.joda.time.Instant;
 import org.slf4j.Logger;
@@ -98,8 +95,6 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
   private final StateInternalsFactory<K> stateInternalsFactory;
   private Map<Slice, Set<TimerInternals.TimerData>> activeTimers = new HashMap<>();
 
-  private transient ProcessContext context;
-  private transient OldDoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> fn;
   private transient ApexTimerInternals timerInternals = new ApexTimerInternals();
   private Instant inputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE;
 
@@ -161,16 +156,53 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
   @Override
   public void setup(OperatorContext context) {
     this.traceTuples = ApexStreamTuple.Logging.isDebugEnabled(serializedOptions.get(), this);
-    StateInternalsFactory<K> stateInternalsFactory = new GroupByKeyStateInternalsFactory();
-    this.fn = GroupAlsoByWindowViaWindowSetDoFn.create(this.windowingStrategy,
-        stateInternalsFactory, SystemReduceFn.<K, V, BoundedWindow>buffering(this.valueCoder));
-    this.context = new ProcessContext(fn, this.timerInternals);
   }
 
   @Override
   public void teardown() {
   }
 
+
+  private ReduceFnRunner<K, V, Iterable<V>, BoundedWindow> newReduceFnRunner(K key) {
+    return new ReduceFnRunner<>(
+        key,
+        windowingStrategy,
+        ExecutableTriggerStateMachine.create(
+            TriggerStateMachines.stateMachineForTrigger(
+                Triggers.toProto(windowingStrategy.getTrigger()))),
+        stateInternalsFactory.stateInternalsForKey(key),
+        timerInternals,
+        new OutputWindowedValue<KV<K, Iterable<V>>>() {
+          @Override
+          public void outputWindowedValue(
+              KV<K, Iterable<V>> output,
+              Instant timestamp,
+              Collection<? extends BoundedWindow> windows,
+              PaneInfo pane) {
+            if (traceTuples) {
+              LOG.debug("\nemitting {} timestamp {}\n", output, timestamp);
+            }
+            ApexGroupByKeyOperator.this.output.emit(
+                ApexStreamTuple.DataTuple.of(WindowedValue.of(output, timestamp, windows, pane)));
+          }
+
+          @Override
+          public <AdditionalOutputT> void outputWindowedValue(
+              TupleTag<AdditionalOutputT> tag,
+              AdditionalOutputT output,
+              Instant timestamp,
+              Collection<? extends BoundedWindow> windows,
+              PaneInfo pane) {
+            throw new UnsupportedOperationException(
+                "GroupAlsoByWindow should not use side outputs");
+          }
+        },
+        NullSideInputReader.empty(),
+        null,
+        SystemReduceFn.<K, V, BoundedWindow>buffering(this.valueCoder),
+        serializedOptions.get());
+  }
+
   /**
    * Returns the list of timers that are ready to fire. These are the timers
    * that are registered to be triggered at a time before the current watermark.
@@ -212,13 +244,11 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
         windowedValue.getTimestamp(),
         windowedValue.getWindows(),
         windowedValue.getPane());
-
-    KeyedWorkItem<K, V> kwi = KeyedWorkItems.elementsWorkItem(
-            kv.getKey(),
-            Collections.singletonList(updatedWindowedValue));
-
-    context.setElement(kwi, getStateInternalsForKey(kwi.key()));
-    fn.processElement(context);
+    timerInternals.setKey(kv.getKey());
+    ReduceFnRunner<K, V, Iterable<V>, BoundedWindow> reduceFnRunner =
+        newReduceFnRunner(kv.getKey());
+    reduceFnRunner.processElements(Collections.singletonList(updatedWindowedValue));
+    reduceFnRunner.persist();
   }
 
   private StateInternals<K> getStateInternalsForKey(K key) {
@@ -265,158 +295,29 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
     if (!timers.isEmpty()) {
       for (Slice keyBytes : timers.keySet()) {
         K key = CoderUtils.decodeFromByteArray(keyCoder, keyBytes.buffer);
-        KeyedWorkItem<K, V> kwi = KeyedWorkItems.<K, V>timersWorkItem(key, timers.get(keyBytes));
-        context.setElement(kwi, getStateInternalsForKey(kwi.key()));
-        fn.processElement(context);
+        timerInternals.setKey(key);
+        ReduceFnRunner<K, V, Iterable<V>, BoundedWindow> reduceFnRunner = newReduceFnRunner(key);
+        reduceFnRunner.onTimers(timers.get(keyBytes));
+        reduceFnRunner.persist();
       }
     }
   }
 
-  private class ProcessContext extends GroupAlsoByWindowViaWindowSetDoFn<K, V, Iterable<V>, ?,
-      KeyedWorkItem<K, V>>.ProcessContext {
-
-    private final ApexTimerInternals timerInternals;
-    private StateInternals<K> stateInternals;
-    private KeyedWorkItem<K, V> element;
-
-    public ProcessContext(OldDoFn<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> function,
-                          ApexTimerInternals timerInternals) {
-      function.super();
-      this.timerInternals = checkNotNull(timerInternals);
-    }
-
-    public void setElement(KeyedWorkItem<K, V> element, StateInternals<K> stateForKey) {
-      this.element = element;
-      this.stateInternals = stateForKey;
-    }
-
-    @Override
-    public KeyedWorkItem<K, V> element() {
-      return this.element;
-    }
-
-    @Override
-    public Instant timestamp() {
-      throw new UnsupportedOperationException(
-          "timestamp() is not available when processing KeyedWorkItems.");
-    }
-
-    @Override
-    public PipelineOptions getPipelineOptions() {
-      return serializedOptions.get();
-    }
-
-    @Override
-    public void output(KV<K, Iterable<V>> output) {
-      throw new UnsupportedOperationException(
-          "output() is not available when processing KeyedWorkItems.");
-    }
-
-    @Override
-    public void outputWithTimestamp(KV<K, Iterable<V>> output, Instant timestamp) {
-      throw new UnsupportedOperationException(
-          "outputWithTimestamp() is not available when processing KeyedWorkItems.");
-    }
-
-    @Override
-    public PaneInfo pane() {
-      throw new UnsupportedOperationException(
-          "pane() is not available when processing KeyedWorkItems.");
-    }
-
-    @Override
-    public BoundedWindow window() {
-      throw new UnsupportedOperationException(
-          "window() is not available when processing KeyedWorkItems.");
-    }
-
-    @Override
-    public WindowingInternals<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> windowingInternals() {
-      return new WindowingInternals<KeyedWorkItem<K, V>, KV<K, Iterable<V>>>() {
-
-        @Override
-        public StateInternals<K> stateInternals() {
-          return stateInternals;
-        }
-
-        @Override
-        public void outputWindowedValue(
-            KV<K, Iterable<V>> output,
-            Instant timestamp,
-            Collection<? extends BoundedWindow> windows,
-            PaneInfo pane) {
-          if (traceTuples) {
-            LOG.debug("\nemitting {} timestamp {}\n", output, timestamp);
-          }
-          ApexGroupByKeyOperator.this.output.emit(
-              ApexStreamTuple.DataTuple.of(WindowedValue.of(output, timestamp, windows, pane)));
-        }
-
-        @Override
-        public <AdditionalOutputT> void outputWindowedValue(
-            TupleTag<AdditionalOutputT> tag,
-            AdditionalOutputT output,
-            Instant timestamp,
-            Collection<? extends BoundedWindow> windows,
-            PaneInfo pane) {
-          throw new UnsupportedOperationException(
-              "GroupAlsoByWindow should not use tagged outputs");
-        }
-
-        @Override
-        public TimerInternals timerInternals() {
-          return timerInternals;
-        }
-
-        @Override
-        public Collection<? extends BoundedWindow> windows() {
-          throw new UnsupportedOperationException("windows() is not available in Streaming mode.");
-        }
-
-        @Override
-        public PaneInfo pane() {
-          throw new UnsupportedOperationException("pane() is not available in Streaming mode.");
-        }
-
-        @Override
-        public <T> T sideInput(PCollectionView<T> view, BoundedWindow mainInputWindow) {
-          throw new RuntimeException("sideInput() is not available in Streaming mode.");
-        }
-      };
-    }
-
-    @Override
-    public <T> T sideInput(PCollectionView<T> view) {
-      throw new RuntimeException("sideInput() is not supported in Streaming mode.");
-    }
-
-    @Override
-    public <T> void output(TupleTag<T> tag, T output) {
-      throw new RuntimeException("output() is not available when grouping by window.");
-    }
-
-    @Override
-    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      output(tag, output);
-    }
-
-    @Override
-    public <AggInputT, AggOutputT> Aggregator<AggInputT, AggOutputT> createAggregatorInternal(
-        String name, Combine.CombineFn<AggInputT, ?, AggOutputT> combiner) {
-      throw new UnsupportedOperationException();
-    }
-  }
-
   /**
    * An implementation of Beam's {@link TimerInternals}.
    *
    */
   private class ApexTimerInternals implements TimerInternals {
+    private K key;
+
+    public void setKey(K key) {
+      this.key = key;
+    }
 
     @Deprecated
     @Override
     public void setTimer(TimerData timerData) {
-      registerActiveTimer(context.element().key(), timerData);
+      registerActiveTimer(key, timerData);
     }
 
     @Override
@@ -427,7 +328,7 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
     @Deprecated
     @Override
     public void deleteTimer(TimerData timerKey) {
-      unregisterActiveTimer(context.element().key(), timerKey);
+      unregisterActiveTimer(key, timerKey);
     }
 
     @Override


[05/50] [abbrv] beam git commit: Rename DoFn.Context#sideOutput to output

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
index 9a66a2f..5496f71 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
@@ -97,7 +97,7 @@ import org.joda.time.Instant;
  * @param <InputT> the input type of the {@link DoFn}
  * @param <FnOutputT> the output type of the {@link DoFn}
  * @param <OutputT> the output type of the operator, this can be different from the fn output
- *                 type when we have side outputs
+ *                 type when we have additional tagged outputs
  */
 public class DoFnOperator<InputT, FnOutputT, OutputT>
     extends AbstractStreamOperator<OutputT>
@@ -110,7 +110,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
   protected final SerializedPipelineOptions serializedOptions;
 
   protected final TupleTag<FnOutputT> mainOutputTag;
-  protected final List<TupleTag<?>> sideOutputTags;
+  protected final List<TupleTag<?>> additionalOutputTags;
 
   protected final Collection<PCollectionView<?>> sideInputs;
   protected final Map<Integer, PCollectionView<?>> sideInputTagMapping;
@@ -155,7 +155,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
       DoFn<InputT, FnOutputT> doFn,
       Coder<WindowedValue<InputT>> inputCoder,
       TupleTag<FnOutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       OutputManagerFactory<OutputT> outputManagerFactory,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<Integer, PCollectionView<?>> sideInputTagMapping,
@@ -165,7 +165,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
     this.doFn = doFn;
     this.inputCoder = inputCoder;
     this.mainOutputTag = mainOutputTag;
-    this.sideOutputTags = sideOutputTags;
+    this.additionalOutputTags = additionalOutputTags;
     this.sideInputTagMapping = sideInputTagMapping;
     this.sideInputs = sideInputs;
     this.serializedOptions = new SerializedPipelineOptions(options);
@@ -275,7 +275,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
         sideInputReader,
         outputManager,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         stepContext,
         aggregatorFactory,
         windowingStrategy);
@@ -619,7 +619,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
       return new DoFnRunners.OutputManager() {
         @Override
         public <T> void output(TupleTag<T> tag, WindowedValue<T> value) {
-          // with side outputs we can't get around this because we don't
+          // with tagged outputs we can't get around this because we don't
           // know our own output type...
           @SuppressWarnings("unchecked")
           OutputT castValue = (OutputT) value;
@@ -675,7 +675,7 @@ public class DoFnOperator<InputT, FnOutputT, OutputT>
     public void noteOutput(WindowedValue<?> output) {}
 
     @Override
-    public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {}
+    public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {}
 
     @Override
     public <T, W extends BoundedWindow> void writePCollectionViewData(

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SplittableDoFnOperator.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SplittableDoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SplittableDoFnOperator.java
index 0724ac2..1a636c9 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SplittableDoFnOperator.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SplittableDoFnOperator.java
@@ -63,7 +63,7 @@ public class SplittableDoFnOperator<
           WindowedValue<
               KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>>> inputCoder,
       TupleTag<FnOutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       OutputManagerFactory<OutputT> outputManagerFactory,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<Integer, PCollectionView<?>> sideInputTagMapping,
@@ -74,7 +74,7 @@ public class SplittableDoFnOperator<
         doFn,
         inputCoder,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         outputManagerFactory,
         windowingStrategy,
         sideInputTagMapping,
@@ -125,9 +125,9 @@ public class SplittableDoFnOperator<
               }
 
               @Override
-              public <SideOutputT> void sideOutputWindowedValue(
-                  TupleTag<SideOutputT> tag,
-                  SideOutputT output,
+              public <AdditionalOutputT> void outputWindowedValue(
+                  TupleTag<AdditionalOutputT> tag,
+                  AdditionalOutputT output,
                   Instant timestamp,
                   Collection<? extends BoundedWindow> windows,
                   PaneInfo pane) {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
index b015f66..8bbc6ef 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
@@ -57,7 +57,7 @@ public class WindowDoFnOperator<K, InputT, OutputT>
       SystemReduceFn<K, InputT, ?, OutputT, BoundedWindow> systemReduceFn,
       Coder<WindowedValue<KeyedWorkItem<K, InputT>>> inputCoder,
       TupleTag<KV<K, OutputT>> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       OutputManagerFactory<WindowedValue<KV<K, OutputT>>> outputManagerFactory,
       WindowingStrategy<?, ?> windowingStrategy,
       Map<Integer, PCollectionView<?>> sideInputTagMapping,
@@ -68,7 +68,7 @@ public class WindowDoFnOperator<K, InputT, OutputT>
         null,
         inputCoder,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         outputManagerFactory,
         windowingStrategy,
         sideInputTagMapping,

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java
index c1fdea3..4c826d1 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java
@@ -144,19 +144,19 @@ public class DoFnOperatorTest {
         WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());
 
     TupleTag<String> mainOutput = new TupleTag<>("main-output");
-    TupleTag<String> sideOutput1 = new TupleTag<>("side-output-1");
-    TupleTag<String> sideOutput2 = new TupleTag<>("side-output-2");
+    TupleTag<String> additionalOutput1 = new TupleTag<>("output-1");
+    TupleTag<String> additionalOutput2 = new TupleTag<>("output-2");
     ImmutableMap<TupleTag<?>, Integer> outputMapping = ImmutableMap.<TupleTag<?>, Integer>builder()
         .put(mainOutput, 1)
-        .put(sideOutput1, 2)
-        .put(sideOutput2, 3)
+        .put(additionalOutput1, 2)
+        .put(additionalOutput2, 3)
         .build();
 
     DoFnOperator<String, String, RawUnionValue> doFnOperator = new DoFnOperator<>(
-        new MultiOutputDoFn(sideOutput1, sideOutput2),
+        new MultiOutputDoFn(additionalOutput1, additionalOutput2),
         windowedValueCoder,
         mainOutput,
-        ImmutableList.<TupleTag<?>>of(sideOutput1, sideOutput2),
+        ImmutableList.<TupleTag<?>>of(additionalOutput1, additionalOutput2),
         new DoFnOperator.MultiOutputOutputManagerFactory(outputMapping),
         WindowingStrategy.globalDefault(),
         new HashMap<Integer, PCollectionView<?>>(), /* side-input mapping */
@@ -176,8 +176,8 @@ public class DoFnOperatorTest {
     assertThat(
         this.stripStreamRecordFromRawUnion(testHarness.getOutput()),
         contains(
-            new RawUnionValue(2, WindowedValue.valueInGlobalWindow("side: one")),
-            new RawUnionValue(3, WindowedValue.valueInGlobalWindow("side: two")),
+            new RawUnionValue(2, WindowedValue.valueInGlobalWindow("extra: one")),
+            new RawUnionValue(3, WindowedValue.valueInGlobalWindow("extra: two")),
             new RawUnionValue(1, WindowedValue.valueInGlobalWindow("got: hello")),
             new RawUnionValue(2, WindowedValue.valueInGlobalWindow("got: hello")),
             new RawUnionValue(3, WindowedValue.valueInGlobalWindow("got: hello"))));
@@ -542,24 +542,24 @@ public class DoFnOperatorTest {
   }
 
   private static class MultiOutputDoFn extends DoFn<String, String> {
-    private TupleTag<String> sideOutput1;
-    private TupleTag<String> sideOutput2;
+    private TupleTag<String> additionalOutput1;
+    private TupleTag<String> additionalOutput2;
 
-    public MultiOutputDoFn(TupleTag<String> sideOutput1, TupleTag<String> sideOutput2) {
-      this.sideOutput1 = sideOutput1;
-      this.sideOutput2 = sideOutput2;
+    public MultiOutputDoFn(TupleTag<String> additionalOutput1, TupleTag<String> additionalOutput2) {
+      this.additionalOutput1 = additionalOutput1;
+      this.additionalOutput2 = additionalOutput2;
     }
 
     @ProcessElement
     public void processElement(ProcessContext c) throws Exception {
       if (c.element().equals("one")) {
-        c.sideOutput(sideOutput1, "side: one");
+        c.output(additionalOutput1, "extra: one");
       } else if (c.element().equals("two")) {
-        c.sideOutput(sideOutput2, "side: two");
+        c.output(additionalOutput2, "extra: two");
       } else {
         c.output("got: " + c.element());
-        c.sideOutput(sideOutput1, "got: " + c.element());
-        c.sideOutput(sideOutput2, "got: " + c.element());
+        c.output(additionalOutput1, "got: " + c.element());
+        c.output(additionalOutput2, "got: " + c.element());
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
index 3ded079..73f3728 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
@@ -175,7 +175,7 @@ public class BatchStatefulParDoOverrides {
               ParDo.of(new BatchStatefulDoFn<K, InputT, OutputT>(fn))
                   .withSideInputs(originalParDo.getSideInputs())
                   .withOutputTags(
-                      originalParDo.getMainOutputTag(), originalParDo.getSideOutputTags());
+                      originalParDo.getMainOutputTag(), originalParDo.getAdditionalOutputTags());
 
       return input.apply(new GbkBeforeStatefulParDo<K, InputT>()).apply(statefulParDo);
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
index 86bfeb6..ead2712 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
@@ -494,7 +494,7 @@ class BatchViewOverrides {
        */
       private void outputMetadataRecordForSize(
           ProcessContext c, KV<KV<K, W>, WindowedValue<V>> value, long uniqueKeyCount) {
-        c.sideOutput(outputForSize,
+        c.output(outputForSize,
             KV.of(ismCoder.hash(ImmutableList.of(IsmFormat.getMetadataKey(),
                 value.getKey().getValue())),
                 KV.of(value.getKey().getValue(), uniqueKeyCount)));
@@ -503,7 +503,7 @@ class BatchViewOverrides {
       /** This outputs records which will be used to construct the entry set. */
       private void outputMetadataRecordForEntrySet(
           ProcessContext c, KV<KV<K, W>, WindowedValue<V>> value) {
-        c.sideOutput(outputForEntrySet,
+        c.output(outputForEntrySet,
             KV.of(ismCoder.hash(ImmutableList.of(IsmFormat.getMetadataKey(),
                 value.getKey().getValue())),
                 KV.of(value.getKey().getValue(), value.getKey().getKey())));
@@ -773,7 +773,7 @@ class BatchViewOverrides {
           coderForMapLike(windowCoder, inputCoder.getKeyCoder(), inputCoder.getValueCoder());
 
       // Create the various output tags representing the main output containing the data stream
-      // and the side outputs containing the metadata about the size and entry set.
+      // and the additional outputs containing the metadata about the size and entry set.
       TupleTag<IsmRecord<WindowedValue<V>>> mainOutputTag = new TupleTag<>();
       TupleTag<KV<Integer, KV<W, Long>>> outputForSizeTag = new TupleTag<>();
       TupleTag<KV<Integer, KV<W, K>>> outputForEntrySetTag = new TupleTag<>();

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchViewOverridesTest.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchViewOverridesTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchViewOverridesTest.java
index cd12c92..87395e6 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchViewOverridesTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchViewOverridesTest.java
@@ -280,7 +280,7 @@ public class BatchViewOverridesTest {
 
     // Verify the number of unique keys per window.
     assertThat(
-        doFnTester.takeSideOutputElements(outputForSizeTag),
+        doFnTester.takeOutputElements(outputForSizeTag),
         contains(
             KV.of(
                 ismCoder.hash(ImmutableList.of(IsmFormat.getMetadataKey(), windowA)),
@@ -294,7 +294,7 @@ public class BatchViewOverridesTest {
 
     // Verify the output for the unique keys.
     assertThat(
-        doFnTester.takeSideOutputElements(outputForEntrySetTag),
+        doFnTester.takeOutputElements(outputForEntrySetTag),
         contains(
             KV.of(
                 ismCoder.hash(ImmutableList.of(IsmFormat.getMetadataKey(), windowA)),

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
index 0e74fa2..029c28a 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
@@ -412,12 +412,14 @@ public class SparkGroupAlsoByWindowViaWindowSet {
     }
 
     @Override
-    public <SideOutputT> void sideOutputWindowedValue(
-        TupleTag<SideOutputT> tag,
-        SideOutputT output, Instant timestamp,
+    public <AdditionalOutputT> void outputWindowedValue(
+        TupleTag<AdditionalOutputT> tag,
+        AdditionalOutputT output,
+        Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {
-      throw new UnsupportedOperationException("Side outputs are not allowed in GroupAlsoByWindow.");
+      throw new UnsupportedOperationException(
+          "Tagged outputs are not allowed in GroupAlsoByWindow.");
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
index a761954..4cd1683 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
@@ -43,8 +43,8 @@ import scala.Tuple2;
 
 
 /**
- * DoFunctions ignore side outputs. MultiDoFunctions deal with side outputs by enriching the
- * underlying data with multiple TupleTags.
+ * DoFunctions ignore outputs that are not the main output. MultiDoFunctions deal with additional
+ * outputs by enriching the underlying data with multiple TupleTags.
  *
  * @param <InputT> Input type for DoFunction.
  * @param <OutputT> Output type for DoFunction.

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
index d19c4a9..ccc0fa3 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
@@ -164,12 +164,12 @@ public class SparkGroupAlsoByWindowViaOutputBufferFn<K, InputT, W extends Bounde
     }
 
     @Override
-    public <SideOutputT> void sideOutputWindowedValue(
-        TupleTag<SideOutputT> tag,
-        SideOutputT output,
+    public <AdditionalOutputT> void outputWindowedValue(
+        TupleTag<AdditionalOutputT> tag,
+        AdditionalOutputT output,
         Instant timestamp,
         Collection<? extends BoundedWindow> windows, PaneInfo pane) {
-      throw new UnsupportedOperationException("GroupAlsoByWindow should not use side outputs.");
+      throw new UnsupportedOperationException("GroupAlsoByWindow should not use tagged outputs.");
     }
 
     Iterable<WindowedValue<KV<K, Iterable<V>>>> getOutputs() {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
index 4f8a1a5..3e8dde5 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkProcessContext.java
@@ -113,7 +113,7 @@ class SparkProcessContext<FnInputT, FnOutputT, OutputT> {
     public void noteOutput(WindowedValue<?> output) { }
 
     @Override
-    public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) { }
+    public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) { }
 
     @Override
     public <T, W extends BoundedWindow> void writePCollectionViewData(

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
index 65892d2..000eada 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
@@ -385,7 +385,7 @@ public final class StreamingTransformTranslator {
         JavaDStream<WindowedValue<InputT>> dStream = unboundedDataset.getDStream();
 
         final String stepName = context.getCurrentTransform().getFullName();
-        if (transform.getSideOutputTags().size() == 0) {
+        if (transform.getAdditionalOutputTags().size() == 0) {
           JavaPairDStream<TupleTag<?>, WindowedValue<?>> all =
               dStream.transformToPair(
                   new Function<

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
index 8fe4831..58d65d0 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
@@ -2191,7 +2191,7 @@ public class Combine {
                 c.output(kv);
               } else {
                 int nonce = counter++ % spread;
-                c.sideOutput(hot, KV.of(KV.of(kv.getKey(), nonce), kv.getValue()));
+                c.output(hot, KV.of(KV.of(kv.getKey(), nonce), kv.getValue()));
               }
             }
           })
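
This hot/cold split is the machinery behind the public hot-key fanout knob on
Combine; a usage sketch, assuming a PCollection<KV<String, Integer>> named
"counts":

    // Spread each hot key across 10 intermediate sub-keys, combine the
    // pieces in parallel, then merge the partial results per original key.
    PCollection<KV<String, Integer>> totals = counts.apply(
        Combine.<String, Integer, Integer>perKey(Sum.ofIntegers())
            .withHotKeyFanout(10));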

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
index 74a1348..d3da251 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -153,14 +153,14 @@ public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayD
     public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
 
     /**
-     * Adds the given element to the side output {@code PCollection} with the
+     * Adds the given element to the output {@code PCollection} with the
      * given tag.
      *
-     * <p>Once passed to {@code sideOutput} the element should not be modified
+     * <p>Once passed to {@code output} the element should not be modified
      * in any way.
      *
      * <p>The caller of {@code ParDo} uses {@link ParDo.SingleOutput#withOutputTags} to
-     * specify the tags of side outputs that it consumes. Non-consumed side
+     * specify the tags of outputs that it consumes. Non-consumed
      * outputs, e.g., outputs for monitoring purposes only, don't necessarily
      * need to be specified.
      *
@@ -180,13 +180,13 @@ public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayD
      *
      * @see ParDo.SingleOutput#withOutputTags
      */
-    public abstract <T> void sideOutput(TupleTag<T> tag, T output);
+    public abstract <T> void output(TupleTag<T> tag, T output);
 
     /**
-     * Adds the given element to the specified side output {@code PCollection},
+     * Adds the given element to the specified output {@code PCollection},
      * with the given timestamp.
      *
-     * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
+     * <p>Once passed to {@code outputWithTimestamp} the element should not be
      * modified in any way.
      *
      * <p>If invoked from {@link ProcessElement}, the timestamp
@@ -207,7 +207,7 @@ public abstract class DoFn<InputT, OutputT> implements Serializable, HasDisplayD
      *
      * @see ParDo.SingleOutput#withOutputTags
      */
-    public abstract <T> void sideOutputWithTimestamp(
+    public abstract <T> void outputWithTimestamp(
         TupleTag<T> tag, T output, Instant timestamp);
 
     /**
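
In user code the renamed context methods read as follows; a sketch assuming a
DoFn<String, String> that declares a hypothetical additional tag "lengthsTag":

    @ProcessElement
    public void processElement(ProcessContext c) {
      c.output(c.element());                       // main output
      c.output(lengthsTag, c.element().length());  // output selected by tag
      // Same, with an explicit timestamp (subject to the skew rules above):
      c.outputWithTimestamp(lengthsTag, c.element().length(), c.timestamp());
    }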

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
index 88f4035..5446431 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java
@@ -65,7 +65,7 @@ import org.joda.time.Instant;
  *
  * // Set arguments shared across all bundles:
  * fnTester.setSideInputs(...);      // If fn takes side inputs.
- * fnTester.setSideOutputTags(...);  // If fn writes to side outputs.
+ * fnTester.setOutputTags(...);  // If fn writes to more than one output.
  *
  * // Process a bundle containing a single input element:
  * Input testInput = ...;
@@ -464,14 +464,14 @@ public class DoFnTester<InputT, OutputT> implements AutoCloseable {
   }
 
   /**
-   * Returns the elements output so far to the side output with the
+   * Returns the elements output so far to the output with the
    * given tag.  Does not clear them, so subsequent calls will
    * continue to include these elements.
    *
-   * @see #takeSideOutputElements
-   * @see #clearSideOutputElements
+   * @see #takeOutputElements
+   * @see #clearOutputElements
    */
-  public <T> List<T> peekSideOutputElements(TupleTag<T> tag) {
+  public <T> List<T> peekOutputElements(TupleTag<T> tag) {
     // TODO: Should we return an unmodifiable list?
     return Lists.transform(getImmutableOutput(tag),
         new Function<ValueInSingleWindow<T>, T>() {
@@ -483,24 +483,23 @@ public class DoFnTester<InputT, OutputT> implements AutoCloseable {
   }
 
   /**
-   * Clears the record of the elements output so far to the side
-   * output with the given tag.
+   * Clears the record of the elements output so far to the output with the given tag.
    *
-   * @see #peekSideOutputElements
+   * @see #peekOutputElements
    */
-  public <T> void clearSideOutputElements(TupleTag<T> tag) {
+  public <T> void clearOutputElements(TupleTag<T> tag) {
     getMutableOutput(tag).clear();
   }
 
   /**
-   * Returns the elements output so far to the side output with the given tag.
+   * Returns the elements output so far to the output with the given tag.
    * Clears the list so these elements don't appear in future calls.
    *
-   * @see #peekSideOutputElements
+   * @see #peekOutputElements
    */
-  public <T> List<T> takeSideOutputElements(TupleTag<T> tag) {
-    List<T> resultElems = new ArrayList<>(peekSideOutputElements(tag));
-    clearSideOutputElements(tag);
+  public <T> List<T> takeOutputElements(TupleTag<T> tag) {
+    List<T> resultElems = new ArrayList<>(peekOutputElements(tag));
+    clearOutputElements(tag);
     return resultElems;
   }
 
@@ -563,12 +562,12 @@ public class DoFnTester<InputT, OutputT> implements AutoCloseable {
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
       throwUnsupportedOutputFromBundleMethods();
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
+    public <T> void output(TupleTag<T> tag, T output) {
       throwUnsupportedOutputFromBundleMethods();
     }
 
@@ -683,21 +682,21 @@ public class DoFnTester<InputT, OutputT> implements AutoCloseable {
 
     @Override
     public void output(OutputT output) {
-      sideOutput(mainOutputTag, output);
+      output(mainOutputTag, output);
     }
 
     @Override
     public void outputWithTimestamp(OutputT output, Instant timestamp) {
-      sideOutputWithTimestamp(mainOutputTag, output, timestamp);
+      outputWithTimestamp(mainOutputTag, output, timestamp);
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      sideOutputWithTimestamp(tag, output, element.getTimestamp());
+    public <T> void output(TupleTag<T> tag, T output) {
+      outputWithTimestamp(tag, output, element.getTimestamp());
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
       getMutableOutput(tag)
           .add(ValueInSingleWindow.of(output, timestamp, element.getWindow(), element.getPane()));
     }
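
Putting the renamed tester methods together, a bundle-level check might look
like this (a sketch; "fn" and the tags stand in for whatever the test defines):

    DoFnTester<String, String> fnTester = DoFnTester.of(fn);
    fnTester.processBundle("one", "two", "three");

    // take* drains the tagged output; a second call would return an empty list.
    List<String> extras = fnTester.takeOutputElements(additionalOutput1);
    // peek* leaves the recorded elements in place for later assertions.
    List<String> stillThere = fnTester.peekOutputElements(additionalOutput2);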

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
index 3de845b..e3777ac 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java
@@ -35,6 +35,7 @@ import org.apache.beam.sdk.coders.CoderRegistry;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.DisplayData.Builder;
+import org.apache.beam.sdk.transforms.display.DisplayData.ItemSpec;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignature.MethodWithExtraParameters;
@@ -103,7 +104,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  * <p>Each of the calls to any of the {@link DoFn DoFn's} processing
  * methods can produce zero or more output elements. All of the
  * output elements from all of the {@link DoFn} instances
- * are included in the output {@link PCollection}.
+ * are included in an output {@link PCollection}.
  *
  * <p>For example:
  *
@@ -180,20 +181,20 @@ import org.apache.beam.sdk.values.TypedPValue;
  *         }}));
  * }</pre>
  *
- * <h2>Side Outputs</h2>
+ * <h2>Additional Outputs</h2>
  *
  * <p>Optionally, a {@link ParDo} transform can produce multiple
  * output {@link PCollection PCollections}, both a "main output"
- * {@code PCollection<OutputT>} plus any number of "side output"
+ * {@code PCollection<OutputT>} plus any number of additional output
  * {@link PCollection PCollections}, each keyed by a distinct {@link TupleTag},
  * and bundled in a {@link PCollectionTuple}. The {@link TupleTag TupleTags}
  * to be used for the output {@link PCollectionTuple} are specified by
- * invoking {@link SingleOutput#withOutputTags}. Unconsumed side outputs do not
+ * invoking {@link SingleOutput#withOutputTags}. Unconsumed outputs do not
  * necessarily need to be explicitly specified, even if the {@link DoFn}
  * generates them. Within the {@link DoFn}, an element is added to the
  * main output {@link PCollection} as normal, using
- * {@link DoFn.Context#output}, while an element is added to a side output
- * {@link PCollection} using {@link DoFn.Context#sideOutput}. For example:
+ * {@link DoFn.Context#output(Object)}, while an element is added to any additional output
+ * {@link PCollection} using {@link DoFn.Context#output(TupleTag, Object)}. For example:
  *
  * <pre>{@code
  * PCollection<String> words = ...;
@@ -201,7 +202,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  * // plus the lengths of words that are above the cut off.
  * // Also select words starting with "MARKER".
  * final int wordLengthCutOff = 10;
- * // Create tags to use for the main and side outputs.
+ * // Create tags to use for the main and additional outputs.
  * final TupleTag<String> wordsBelowCutOffTag =
  *     new TupleTag<String>(){};
  * final TupleTag<Integer> wordLengthsAboveCutOffTag =
@@ -212,7 +213,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  *     words.apply(
  *         ParDo
  *         .of(new DoFn<String, String>() {
- *             // Create a tag for the unconsumed side output.
+ *             // Create a tag for the unconsumed output.
  *             final TupleTag<String> specialWordsTag =
  *                 new TupleTag<String>(){};
  *            {@literal @}ProcessElement
@@ -222,19 +223,19 @@ import org.apache.beam.sdk.values.TypedPValue;
  *                 // Emit this short word to the main output.
  *                 c.output(word);
  *               } else {
- *                 // Emit this long word's length to a side output.
- *                 c.sideOutput(wordLengthsAboveCutOffTag, word.length());
+ *                 // Emit this long word's length to a specified output.
+ *                 c.output(wordLengthsAboveCutOffTag, word.length());
  *               }
  *               if (word.startsWith("MARKER")) {
- *                 // Emit this word to a different side output.
- *                 c.sideOutput(markedWordsTag, word);
+ *                 // Emit this word to a different specified output.
+ *                 c.output(markedWordsTag, word);
  *               }
  *               if (word.startsWith("SPECIAL")) {
- *                 // Emit this word to the unconsumed side output.
- *                 c.sideOutput(specialWordsTag, word);
+ *                 // Emit this word to the unconsumed output.
+ *                 c.output(specialWordsTag, word);
  *               }
  *             }})
- *             // Specify the main and consumed side output tags of the
+ *             // Specify the main and consumed output tags of the
  *             // PCollectionTuple result:
  *         .withOutputTags(wordsBelowCutOffTag,
  *             TupleTagList.of(wordLengthsAboveCutOffTag)
@@ -254,9 +255,9 @@ import org.apache.beam.sdk.values.TypedPValue;
  * elements of the main output {@link PCollection PCollection&lt;OutputT&gt;} is
  * inferred from the concrete type of the {@link DoFn DoFn&lt;InputT, OutputT&gt;}.
  *
- * <p>By default, the {@link Coder Coder&lt;SideOutputT&gt;} for the elements of
- * a side output {@link PCollection PCollection&lt;SideOutputT&gt;} is inferred
- * from the concrete type of the corresponding {@link TupleTag TupleTag&lt;SideOutputT&gt;}.
+ * <p>By default, the {@link Coder Coder&lt;AdditionalOutputT&gt;} for the elements of
+ * an output {@link PCollection PCollection&lt;AdditionalOutputT&gt;} is inferred
+ * from the concrete type of the corresponding {@link TupleTag TupleTag&lt;AdditionalOutputT&gt;}.
  * To be successful, the {@link TupleTag} should be created as an instance
  * of a trivial anonymous subclass, with {@code {}} suffixed to the
  * constructor call. Such uses block Java's generic type parameter
@@ -265,12 +266,12 @@ import org.apache.beam.sdk.values.TypedPValue;
  * <pre> {@code
  * // A TupleTag to use for a side input can be written concisely:
  * final TupleTag<Integer> sideInputTag = new TupleTag<>();
- * // A TupleTag to use for a side output should be written with "{}",
+ * // A TupleTag to use for an output should be written with "{}",
  * // and explicit generic parameter type:
- * final TupleTag<String> sideOutputTag = new TupleTag<String>(){};
+ * final TupleTag<String> additionalOutputTag = new TupleTag<String>(){};
  * } </pre>
  * This style of {@code TupleTag} instantiation is used in the example of
- * multiple side outputs, above.
+ * {@link ParDo ParDos} that produce multiple outputs, above.
  *
  * <h2>Serializability of {@link DoFn DoFns}</h2>
  *
@@ -358,7 +359,7 @@ import org.apache.beam.sdk.values.TypedPValue;
  * that state across Java processes. All information should be
  * communicated to {@link DoFn} instances via main and side inputs and
  * serialized state, and all output should be communicated from a
- * {@link DoFn} instance via main and side outputs, in the absence of
+ * {@link DoFn} instance via output {@link PCollection PCollections}, in the absence of
  * external communication mechanisms written by user code.
  *
  * <h2>Fault Tolerance</h2>
@@ -602,14 +603,14 @@ public class ParDo {
 
     /**
      * Returns a new multi-output {@link ParDo} {@link PTransform} that's like this {@link
-     * PTransform} but with the specified main and side output tags. Does not modify this {@link
+     * PTransform} but with the specified output tags. Does not modify this {@link
      * PTransform}.
      *
-     * <p>See the discussion of Side Outputs above for more explanation.
+     * <p>See the discussion of Additional Outputs above for more explanation.
      */
     public MultiOutput<InputT, OutputT> withOutputTags(
-        TupleTag<OutputT> mainOutputTag, TupleTagList sideOutputTags) {
-      return new MultiOutput<>(fn, sideInputs, mainOutputTag, sideOutputTags, fnDisplayData);
+        TupleTag<OutputT> mainOutputTag, TupleTagList additionalOutputTags) {
+      return new MultiOutput<>(fn, sideInputs, mainOutputTag, additionalOutputTags, fnDisplayData);
     }
 
     @Override
@@ -671,11 +672,9 @@ public class ParDo {
   }
 
   /**
-   * A {@link PTransform} that, when applied to a
-   * {@code PCollection<InputT>}, invokes a user-specified
-   * {@code DoFn<InputT, OutputT>} on all its elements, which can emit elements
-   * to any of the {@link PTransform}'s main and side output
-   * {@code PCollection}s, which are bundled into a result
+   * A {@link PTransform} that, when applied to a {@code PCollection<InputT>}, invokes a
+   * user-specified {@code DoFn<InputT, OutputT>} on all its elements, which can emit elements to
+   * any of the {@link PTransform}'s output {@code PCollection}s, which are bundled into a result
    * {@code PCollectionTuple}.
    *
    * @param <InputT> the type of the (main) input {@code PCollection} elements
@@ -685,7 +684,7 @@ public class ParDo {
       extends PTransform<PCollection<? extends InputT>, PCollectionTuple> {
     private final List<PCollectionView<?>> sideInputs;
     private final TupleTag<OutputT> mainOutputTag;
-    private final TupleTagList sideOutputTags;
+    private final TupleTagList additionalOutputTags;
     private final DisplayData.ItemSpec<? extends Class<?>> fnDisplayData;
     private final DoFn<InputT, OutputT> fn;
 
@@ -693,11 +692,11 @@ public class ParDo {
         DoFn<InputT, OutputT> fn,
         List<PCollectionView<?>> sideInputs,
         TupleTag<OutputT> mainOutputTag,
-        TupleTagList sideOutputTags,
-        DisplayData.ItemSpec<? extends Class<?>> fnDisplayData) {
+        TupleTagList additionalOutputTags,
+        ItemSpec<? extends Class<?>> fnDisplayData) {
       this.sideInputs = sideInputs;
       this.mainOutputTag = mainOutputTag;
-      this.sideOutputTags = sideOutputTags;
+      this.additionalOutputTags = additionalOutputTags;
       this.fn = SerializableUtils.clone(fn);
       this.fnDisplayData = fnDisplayData;
     }
@@ -730,7 +729,7 @@ public class ParDo {
               .addAll(sideInputs)
               .build(),
           mainOutputTag,
-          sideOutputTags,
+          additionalOutputTags,
           fnDisplayData);
     }
 
@@ -745,7 +744,7 @@ public class ParDo {
 
       PCollectionTuple outputs = PCollectionTuple.ofPrimitiveOutputsInternal(
           input.getPipeline(),
-          TupleTagList.of(mainOutputTag).and(sideOutputTags.getAll()),
+          TupleTagList.of(mainOutputTag).and(additionalOutputTags.getAll()),
           input.getWindowingStrategy(),
           input.isBounded());
 
@@ -794,8 +793,8 @@ public class ParDo {
       return mainOutputTag;
     }
 
-    public TupleTagList getSideOutputTags() {
-      return sideOutputTags;
+    public TupleTagList getAdditionalOutputTags() {
+      return additionalOutputTags;
     }
 
     public List<PCollectionView<?>> getSideInputs() {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
index 2031bc9..595d18c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java
@@ -169,7 +169,7 @@ public class Partition<T> extends PTransform<PCollection<T>, PCollectionList<T>>
       if (0 <= partition && partition < numPartitions) {
         @SuppressWarnings("unchecked")
         TupleTag<X> typedTag = (TupleTag<X>) outputTags.get(partition);
-        c.sideOutput(typedTag, input);
+        c.output(typedTag, input);
       } else {
         throw new IndexOutOfBoundsException(
             "Partition function returned out of bounds index: "

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java
index 0ab26ca..ce67e94 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java
@@ -37,8 +37,7 @@ import org.apache.beam.sdk.values.PCollection.IsBounded;
  * {@link PTransform} taking
  * or producing multiple PCollection inputs or outputs that can be of
  * different types, for instance a
- * {@link ParDo} with side
- * outputs.
+ * {@link ParDo} with multiple outputs.
  *
  * <p>A {@link PCollectionTuple} can be created and accessed like follows:
  * <pre> {@code

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
index a6b63ab..37d41f7 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java
@@ -31,25 +31,23 @@ import org.apache.beam.sdk.util.CloudObject;
 import org.apache.beam.sdk.util.PropertyNames;
 
 /**
- * A {@link TupleTag} is a typed tag to use as the key of a
- * heterogeneously typed tuple, like {@link PCollectionTuple}.
- * Its generic type parameter allows tracking
- * the static type of things stored in tuples.
+ * A {@link TupleTag} is a typed tag to use as the key of a heterogeneously typed tuple, like {@link
+ * PCollectionTuple}. Its generic type parameter allows tracking the static type of things stored in
+ * tuples.
  *
- * <p>To aid in assigning default {@link org.apache.beam.sdk.coders.Coder Coders} for results of
- * side outputs of {@link ParDo}, an output
- * {@link TupleTag} should be instantiated with an extra {@code {}} so
- * it is an instance of an anonymous subclass without generic type
- * parameters.  Input {@link TupleTag TupleTags} require no such extra
- * instantiation (although it doesn't hurt).  For example:
+ * <p>To aid in assigning default {@link org.apache.beam.sdk.coders.Coder Coders} for results of a
+ * {@link ParDo}, an output {@link TupleTag} should be instantiated with an extra {@code {}} so it
+ * is an instance of an anonymous subclass without generic type parameters. Input {@link TupleTag
+ * TupleTags} require no such extra instantiation (although it doesn't hurt). For example:
  *
- * <pre> {@code
+ * <pre>{@code
  * TupleTag<SomeType> inputTag = new TupleTag<>();
  * TupleTag<SomeOtherType> outputTag = new TupleTag<SomeOtherType>(){};
- * } </pre>
+ * }
+ * </pre>
  *
- * @param <V> the type of the elements or values of the tagged thing,
- * e.g., a {@code PCollection<V>}.
+ * @param <V> the type of the elements or values of the tagged thing, e.g., a {@code
+ *     PCollection<V>}.
  */
 public class TupleTag<V> implements Serializable {
   /**

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java
index b4ce941..5aeff5e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.transforms.ParDo;
 /**
  * A {@link TupleTagList} is an immutable list of heterogeneously
  * typed {@link TupleTag TupleTags}. A {@link TupleTagList} is used, for instance, to
- * specify the tags of the side outputs of a
+ * specify the tags of the additional outputs of a
  * {@link ParDo}.
  *
  * <p>A {@link TupleTagList} can be created and accessed like follows:
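
A minimal sketch of the fluent construction, with the fn and tag names assumed:

    TupleTagList tags = TupleTagList.of(additionalOutputTag1).and(additionalOutputTag2);
    ParDo.of(fn).withOutputTags(mainOutputTag, tags);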

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypedPValue.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypedPValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypedPValue.java
index d353835..54af747 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypedPValue.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypedPValue.java
@@ -148,14 +148,14 @@ public abstract class TypedPValue<T> extends PValueBase implements PValue {
         return new CoderOrFailure<>(registry.getDefaultCoder(token), null);
       } catch (CannotProvideCoderException exc) {
         inferFromTokenException = exc;
-        // Attempt to detect when the token came from a TupleTag used for a ParDo side output,
+        // Attempt to detect when the token came from a TupleTag used for a ParDo output,
         // and provide a better error message if so. Unfortunately, this information is not
         // directly available from the TypeDescriptor, so infer based on the type of the PTransform
         // and the error message itself.
         if (transform instanceof ParDo.MultiOutput
             && exc.getReason() == ReasonCode.TYPE_ERASURE) {
           inferFromTokenException = new CannotProvideCoderException(exc.getMessage()
-              + " If this error occurs for a side output of the producing ParDo, verify that the "
+              + " If this error occurs for an output of the producing ParDo, verify that the "
               + "TupleTag for this output is constructed with proper type information (see "
               + "TupleTag Javadoc) or explicitly set the Coder to use if this is not possible.");
         }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/test/java/org/apache/beam/sdk/metrics/MetricsTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/metrics/MetricsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/metrics/MetricsTest.java
index 3555db3..afe384d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/metrics/MetricsTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/metrics/MetricsTest.java
@@ -221,7 +221,7 @@ public class MetricsTest implements Serializable {
                 values.update(element);
                 gauge.set(12L);
                 c.output(element);
-                c.sideOutput(output2, element);
+                c.output(output2, element);
               }
             })
             .withOutputTags(output1, TupleTagList.of(output2)));
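
The metric handles exercised above come from the Metrics factory; declared as
DoFn fields they look roughly like this (a sketch of the factory methods, names
assumed):

    private final Counter count = Metrics.counter(MetricsTest.class, "count");
    private final Distribution values = Metrics.distribution(MetricsTest.class, "input");
    private final Gauge gauge = Metrics.gauge(MetricsTest.class, "my-gauge");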

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index b429eab..589c744 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -153,15 +153,15 @@ public class ParDoTest implements Serializable {
     State state = State.NOT_SET_UP;
 
     final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
-    final List<TupleTag<String>> sideOutputTupleTags = new ArrayList<>();
+    final List<TupleTag<String>> additionalOutputTupleTags = new ArrayList<>();
 
     public TestDoFn() {
     }
 
     public TestDoFn(List<PCollectionView<Integer>> sideInputViews,
-                    List<TupleTag<String>> sideOutputTupleTags) {
+                    List<TupleTag<String>> additionalOutputTupleTags) {
       this.sideInputViews.addAll(sideInputViews);
-      this.sideOutputTupleTags.addAll(sideOutputTupleTags);
+      this.additionalOutputTupleTags.addAll(additionalOutputTupleTags);
     }
 
     @Setup
@@ -197,9 +197,9 @@ public class ParDoTest implements Serializable {
 
     private void outputToAll(Context c, String value) {
       c.output(value);
-      for (TupleTag<String> sideOutputTupleTag : sideOutputTupleTags) {
-        c.sideOutput(sideOutputTupleTag,
-                     sideOutputTupleTag.getId() + ": " + value);
+      for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
+        c.output(additionalOutputTupleTag,
+                     additionalOutputTupleTag.getId() + ": " + value);
       }
     }
 
@@ -212,9 +212,9 @@ public class ParDoTest implements Serializable {
         value += ": " + sideInputValues;
       }
       c.output(value);
-      for (TupleTag<String> sideOutputTupleTag : sideOutputTupleTags) {
-        c.sideOutput(sideOutputTupleTag,
-                     sideOutputTupleTag.getId() + ": " + value);
+      for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
+        c.output(additionalOutputTupleTag,
+                     additionalOutputTupleTag.getId() + ": " + value);
       }
     }
   }
@@ -389,90 +389,90 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category(ValidatesRunner.class)
-  public void testParDoWithSideOutputs() {
+  public void testParDoWithTaggedOutput() {
 
     List<Integer> inputs = Arrays.asList(3, -42, 666);
 
     TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    TupleTag<String> sideOutputTag1 = new TupleTag<String>("side1"){};
-    TupleTag<String> sideOutputTag2 = new TupleTag<String>("side2"){};
-    TupleTag<String> sideOutputTag3 = new TupleTag<String>("side3"){};
-    TupleTag<String> sideOutputTagUnwritten = new TupleTag<String>("sideUnwritten"){};
+    TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
+    TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
+    TupleTag<String> additionalOutputTag3 = new TupleTag<String>("additional3"){};
+    TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
 
     PCollectionTuple outputs = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo
                .of(new TestDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
-                   Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
+                   Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
                .withOutputTags(
                    mainOutputTag,
-                   TupleTagList.of(sideOutputTag3)
-                       .and(sideOutputTag1)
-                       .and(sideOutputTagUnwritten)
-                       .and(sideOutputTag2)));
+                   TupleTagList.of(additionalOutputTag3)
+                       .and(additionalOutputTag1)
+                       .and(additionalOutputTagUnwritten)
+                       .and(additionalOutputTag2)));
 
     PAssert.that(outputs.get(mainOutputTag))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
 
-    PAssert.that(outputs.get(sideOutputTag1))
+    PAssert.that(outputs.get(additionalOutputTag1))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag1));
-    PAssert.that(outputs.get(sideOutputTag2))
+                   .fromOutput(additionalOutputTag1));
+    PAssert.that(outputs.get(additionalOutputTag2))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag2));
-    PAssert.that(outputs.get(sideOutputTag3))
+                   .fromOutput(additionalOutputTag2));
+    PAssert.that(outputs.get(additionalOutputTag3))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag3));
-    PAssert.that(outputs.get(sideOutputTagUnwritten)).empty();
+                   .fromOutput(additionalOutputTag3));
+    PAssert.that(outputs.get(additionalOutputTagUnwritten)).empty();
 
     pipeline.run();
   }
 
   @Test
   @Category(ValidatesRunner.class)
-  public void testParDoEmptyWithSideOutputs() {
+  public void testParDoEmptyWithTaggedOutput() {
     TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    TupleTag<String> sideOutputTag1 = new TupleTag<String>("side1"){};
-    TupleTag<String> sideOutputTag2 = new TupleTag<String>("side2"){};
-    TupleTag<String> sideOutputTag3 = new TupleTag<String>("side3"){};
-    TupleTag<String> sideOutputTagUnwritten = new TupleTag<String>("sideUnwritten"){};
+    TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
+    TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
+    TupleTag<String> additionalOutputTag3 = new TupleTag<String>("additional3"){};
+    TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
 
     PCollectionTuple outputs = pipeline
         .apply(Create.empty(VarIntCoder.of()))
         .apply(ParDo
                .of(new TestDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
-                   Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
+                   Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
                .withOutputTags(
                    mainOutputTag,
-                   TupleTagList.of(sideOutputTag3).and(sideOutputTag1)
-                   .and(sideOutputTagUnwritten).and(sideOutputTag2)));
+                   TupleTagList.of(additionalOutputTag3).and(additionalOutputTag1)
+                   .and(additionalOutputTagUnwritten).and(additionalOutputTag2)));
 
     List<Integer> inputs = Collections.emptyList();
     PAssert.that(outputs.get(mainOutputTag))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
 
-    PAssert.that(outputs.get(sideOutputTag1))
+    PAssert.that(outputs.get(additionalOutputTag1))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag1));
-    PAssert.that(outputs.get(sideOutputTag2))
+                   .fromOutput(additionalOutputTag1));
+    PAssert.that(outputs.get(additionalOutputTag2))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag2));
-    PAssert.that(outputs.get(sideOutputTag3))
+                   .fromOutput(additionalOutputTag2));
+    PAssert.that(outputs.get(additionalOutputTag3))
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs)
-                   .fromSideOutput(sideOutputTag3));
-    PAssert.that(outputs.get(sideOutputTagUnwritten)).empty();
+                   .fromOutput(additionalOutputTag3));
+    PAssert.that(outputs.get(additionalOutputTagUnwritten)).empty();
 
     pipeline.run();
   }
 
   @Test
   @Category(ValidatesRunner.class)
-  public void testParDoWithEmptySideOutputs() {
+  public void testParDoWithEmptyTaggedOutput() {
     TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    TupleTag<String> sideOutputTag1 = new TupleTag<String>("side1"){};
-    TupleTag<String> sideOutputTag2 = new TupleTag<String>("side2"){};
+    TupleTag<String> additionalOutputTag1 = new TupleTag<String>("additional1"){};
+    TupleTag<String> additionalOutputTag2 = new TupleTag<String>("additional2"){};
 
     PCollectionTuple outputs = pipeline
         .apply(Create.empty(VarIntCoder.of()))
@@ -480,12 +480,12 @@ public class ParDoTest implements Serializable {
                .of(new TestNoOutputDoFn())
                .withOutputTags(
                    mainOutputTag,
-                   TupleTagList.of(sideOutputTag1).and(sideOutputTag2)));
+                   TupleTagList.of(additionalOutputTag1).and(additionalOutputTag2)));
 
     PAssert.that(outputs.get(mainOutputTag)).empty();
 
-    PAssert.that(outputs.get(sideOutputTag1)).empty();
-    PAssert.that(outputs.get(sideOutputTag2)).empty();
+    PAssert.that(outputs.get(additionalOutputTag1)).empty();
+    PAssert.that(outputs.get(additionalOutputTag2)).empty();
 
     pipeline.run();
   }
@@ -493,12 +493,12 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category(ValidatesRunner.class)
-  public void testParDoWithOnlySideOutputs() {
+  public void testParDoWithOnlyTaggedOutput() {
 
     List<Integer> inputs = Arrays.asList(3, -42, 666);
 
     final TupleTag<Void> mainOutputTag = new TupleTag<Void>("main"){};
-    final TupleTag<Integer> sideOutputTag = new TupleTag<Integer>("side"){};
+    final TupleTag<Integer> additionalOutputTag = new TupleTag<Integer>("additional"){};
 
     PCollectionTuple outputs = pipeline
         .apply(Create.of(inputs))
@@ -506,29 +506,29 @@ public class ParDoTest implements Serializable {
             .of(new DoFn<Integer, Void>(){
                 @ProcessElement
                 public void processElement(ProcessContext c) {
-                  c.sideOutput(sideOutputTag, c.element());
+                  c.output(additionalOutputTag, c.element());
                 }})
-            .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     PAssert.that(outputs.get(mainOutputTag)).empty();
-    PAssert.that(outputs.get(sideOutputTag)).containsInAnyOrder(inputs);
+    PAssert.that(outputs.get(additionalOutputTag)).containsInAnyOrder(inputs);
 
     pipeline.run();
   }
 
   @Test
   @Category(NeedsRunner.class)
-  public void testParDoWritingToUndeclaredSideOutput() {
+  public void testParDoWritingToUndeclaredTag() {
 
     List<Integer> inputs = Arrays.asList(3, -42, 666);
 
-    TupleTag<String> sideTag = new TupleTag<String>("side"){};
+    TupleTag<String> notOutputTag = new TupleTag<String>("additional"){};
 
     PCollection<String> output = pipeline
         .apply(Create.of(inputs))
         .apply(ParDo.of(new TestDoFn(
             Arrays.<PCollectionView<Integer>>asList(),
-            Arrays.asList(sideTag))));
+            Arrays.asList(notOutputTag))));
 
     PAssert.that(output)
         .satisfies(ParDoTest.HasExpectedOutput.forInput(inputs));
@@ -539,7 +539,7 @@ public class ParDoTest implements Serializable {
   @Test
   // TODO: The exception thrown is runner-specific, even if the behavior is general
   @Category(NeedsRunner.class)
-  public void testParDoUndeclaredSideOutputLimit() {
+  public void testParDoUndeclaredTagLimit() {
 
     PCollection<Integer> input = pipeline.apply(Create.of(Arrays.asList(3)));
 
@@ -548,13 +548,13 @@ public class ParDoTest implements Serializable {
         .apply("Success1000", ParDo.of(new DoFn<Integer, String>() {
             @ProcessElement
             public void processElement(ProcessContext c) {
-              TupleTag<String> specialSideTag = new TupleTag<String>(){};
-              c.sideOutput(specialSideTag, "side");
-              c.sideOutput(specialSideTag, "side");
-              c.sideOutput(specialSideTag, "side");
+              TupleTag<String> specialOutputTag = new TupleTag<String>(){};
+              c.output(specialOutputTag, "special");
+              c.output(specialOutputTag, "special");
+              c.output(specialOutputTag, "special");
 
               for (int i = 0; i < 998; i++) {
-                c.sideOutput(new TupleTag<String>(){}, "side");
+                c.output(new TupleTag<String>(){}, "tag" + i);
               }
             }}));
     pipeline.run();
@@ -565,12 +565,12 @@ public class ParDoTest implements Serializable {
             @ProcessElement
             public void processElement(ProcessContext c) {
               for (int i = 0; i < 1000; i++) {
-                c.sideOutput(new TupleTag<String>(){}, "side");
+                c.output(new TupleTag<String>(){}, "output" + i);
               }
             }}));
 
     thrown.expect(RuntimeException.class);
-    thrown.expectMessage("the number of side outputs has exceeded a limit");
+    thrown.expectMessage("the number of outputs has exceeded a limit");
     pipeline.run();
   }
 
@@ -647,7 +647,7 @@ public class ParDoTest implements Serializable {
     List<Integer> inputs = Arrays.asList(3, -42, 666);
 
     final TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    final TupleTag<Void> sideOutputTag = new TupleTag<Void>("sideOutput"){};
+    final TupleTag<Void> additionalOutputTag = new TupleTag<Void>("output"){};
 
     PCollectionView<Integer> sideInput1 = pipeline
         .apply("CreateSideInput1", Create.of(11))
@@ -668,7 +668,7 @@ public class ParDoTest implements Serializable {
             .withSideInputs(sideInput1)
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
-            .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     PAssert.that(outputs.get(mainOutputTag))
         .satisfies(ParDoTest.HasExpectedOutput
@@ -685,7 +685,7 @@ public class ParDoTest implements Serializable {
     List<Integer> inputs = Arrays.asList(3, -42, 666);
 
     final TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    final TupleTag<Void> sideOutputTag = new TupleTag<Void>("sideOutput"){};
+    final TupleTag<Void> additionalOutputTag = new TupleTag<Void>("output"){};
 
     PCollectionView<Integer> sideInput1 = pipeline
         .apply("CreateSideInput1", Create.of(11))
@@ -706,7 +706,7 @@ public class ParDoTest implements Serializable {
             .withSideInputs(sideInput1)
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
-            .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     PAssert.that(outputs.get(mainOutputTag))
         .satisfies(ParDoTest.HasExpectedOutput
@@ -853,37 +853,37 @@ public class ParDoTest implements Serializable {
   @Test
   public void testParDoMultiNameBasedDoFnWithTrimmerSuffix() {
     assertThat(
-        ParDo.of(new SideOutputDummyFn(null)).withOutputTags(null, null).getName(),
-        containsString("ParMultiDo(SideOutputDummy)"));
+        ParDo.of(new TaggedOutputDummyFn(null)).withOutputTags(null, null).getName(),
+        containsString("ParMultiDo(TaggedOutputDummy)"));
   }
 
   @Test
-  public void testParDoWithSideOutputsName() {
+  public void testParDoWithTaggedOutputName() {
     pipeline.enableAbandonedNodeEnforcement(false);
 
     TupleTag<String> mainOutputTag = new TupleTag<String>("main"){};
-    TupleTag<String> sideOutputTag1 = new TupleTag<String>("side1"){};
-    TupleTag<String> sideOutputTag2 = new TupleTag<String>("side2"){};
-    TupleTag<String> sideOutputTag3 = new TupleTag<String>("side3"){};
-    TupleTag<String> sideOutputTagUnwritten = new TupleTag<String>("sideUnwritten"){};
+    TupleTag<String> additionalOutputTag1 = new TupleTag<String>("output1"){};
+    TupleTag<String> additionalOutputTag2 = new TupleTag<String>("output2"){};
+    TupleTag<String> additionalOutputTag3 = new TupleTag<String>("output3"){};
+    TupleTag<String> additionalOutputTagUnwritten = new TupleTag<String>("unwrittenOutput"){};
 
     PCollectionTuple outputs = pipeline
         .apply(Create.of(Arrays.asList(3, -42, 666))).setName("MyInput")
         .apply("MyParDo", ParDo
                .of(new TestDoFn(
                    Arrays.<PCollectionView<Integer>>asList(),
-                   Arrays.asList(sideOutputTag1, sideOutputTag2, sideOutputTag3)))
+                   Arrays.asList(additionalOutputTag1, additionalOutputTag2, additionalOutputTag3)))
                .withOutputTags(
                    mainOutputTag,
-                   TupleTagList.of(sideOutputTag3).and(sideOutputTag1)
-                   .and(sideOutputTagUnwritten).and(sideOutputTag2)));
+                   TupleTagList.of(additionalOutputTag3).and(additionalOutputTag1)
+                   .and(additionalOutputTagUnwritten).and(additionalOutputTag2)));
 
     assertEquals("MyParDo.main", outputs.get(mainOutputTag).getName());
-    assertEquals("MyParDo.side1", outputs.get(sideOutputTag1).getName());
-    assertEquals("MyParDo.side2", outputs.get(sideOutputTag2).getName());
-    assertEquals("MyParDo.side3", outputs.get(sideOutputTag3).getName());
-    assertEquals("MyParDo.sideUnwritten",
-                 outputs.get(sideOutputTagUnwritten).getName());
+    assertEquals("MyParDo.output1", outputs.get(additionalOutputTag1).getName());
+    assertEquals("MyParDo.output2", outputs.get(additionalOutputTag2).getName());
+    assertEquals("MyParDo.output3", outputs.get(additionalOutputTag3).getName());
+    assertEquals("MyParDo.unwrittenOutput",
+                 outputs.get(additionalOutputTagUnwritten).getName());
   }
 
   @Test
@@ -892,29 +892,29 @@ public class ParDoTest implements Serializable {
     PCollection<Long> longs = pipeline.apply(CountingInput.unbounded());
 
     TupleTag<Long> mainOut = new TupleTag<>();
-    final TupleTag<String> sideOutOne = new TupleTag<>();
-    final TupleTag<Integer> sideOutTwo = new TupleTag<>();
+    final TupleTag<String> valueAsString = new TupleTag<>();
+    final TupleTag<Integer> valueAsInt = new TupleTag<>();
     DoFn<Long, Long> fn =
         new DoFn<Long, Long>() {
           @ProcessElement
           public void processElement(ProcessContext cxt) {
             cxt.output(cxt.element());
-            cxt.sideOutput(sideOutOne, Long.toString(cxt.element()));
-            cxt.sideOutput(sideOutTwo, Long.valueOf(cxt.element()).intValue());
+            cxt.output(valueAsString, Long.toString(cxt.element()));
+            cxt.output(valueAsInt, Long.valueOf(cxt.element()).intValue());
           }
         };
 
     ParDo.MultiOutput<Long, Long> parDo =
-        ParDo.of(fn).withOutputTags(mainOut, TupleTagList.of(sideOutOne).and(sideOutTwo));
+        ParDo.of(fn).withOutputTags(mainOut, TupleTagList.of(valueAsString).and(valueAsInt));
     PCollectionTuple firstApplication = longs.apply("first", parDo);
     PCollectionTuple secondApplication = longs.apply("second", parDo);
     assertThat(firstApplication, not(equalTo(secondApplication)));
     assertThat(
         firstApplication.getAll().keySet(),
-        Matchers.<TupleTag<?>>containsInAnyOrder(mainOut, sideOutOne, sideOutTwo));
+        Matchers.<TupleTag<?>>containsInAnyOrder(mainOut, valueAsString, valueAsInt));
     assertThat(
         secondApplication.getAll().keySet(),
-        Matchers.<TupleTag<?>>containsInAnyOrder(mainOut, sideOutOne, sideOutTwo));
+        Matchers.<TupleTag<?>>containsInAnyOrder(mainOut, valueAsString, valueAsInt));
   }
 
   @Test
@@ -1017,28 +1017,28 @@ public class ParDoTest implements Serializable {
     }
   }
 
-  private static class SideOutputDummyFn extends DoFn<Integer, Integer> {
-    private TupleTag<TestDummy> sideTag;
-    public SideOutputDummyFn(TupleTag<TestDummy> sideTag) {
-      this.sideTag = sideTag;
+  private static class TaggedOutputDummyFn extends DoFn<Integer, Integer> {
+    private TupleTag<TestDummy> dummyOutputTag;
+    public TaggedOutputDummyFn(TupleTag<TestDummy> dummyOutputTag) {
+      this.dummyOutputTag = dummyOutputTag;
     }
 
     @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(1);
-      c.sideOutput(sideTag, new TestDummy());
+      c.output(dummyOutputTag, new TestDummy());
      }
   }
 
   private static class MainOutputDummyFn extends DoFn<Integer, TestDummy> {
-    private TupleTag<Integer> sideTag;
-    public MainOutputDummyFn(TupleTag<Integer> sideTag) {
-      this.sideTag = sideTag;
+    private TupleTag<Integer> intOutputTag;
+    public MainOutputDummyFn(TupleTag<Integer> intOutputTag) {
+      this.intOutputTag = intOutputTag;
     }
     @ProcessElement
     public void processElement(ProcessContext c) {
       c.output(new TestDummy());
-      c.sideOutput(sideTag, 1);
+      c.output(intOutputTag, 1);
      }
   }
 
@@ -1112,7 +1112,7 @@ public class ParDoTest implements Serializable {
       implements SerializableFunction<Iterable<String>, Void>, Serializable {
     private final List<Integer> inputs;
     private final List<Integer> sideInputs;
-    private final String sideOutput;
+    private final String additionalOutput;
     private final boolean ordered;
 
     public static HasExpectedOutput forInput(List<Integer> inputs) {
@@ -1125,11 +1125,11 @@ public class ParDoTest implements Serializable {
 
     private HasExpectedOutput(List<Integer> inputs,
                               List<Integer> sideInputs,
-                              String sideOutput,
+                              String additionalOutput,
                               boolean ordered) {
       this.inputs = inputs;
       this.sideInputs = sideInputs;
-      this.sideOutput = sideOutput;
+      this.additionalOutput = additionalOutput;
       this.ordered = ordered;
     }
 
@@ -1138,18 +1138,18 @@ public class ParDoTest implements Serializable {
       for (Integer sideInputValue : sideInputValues) {
         sideInputs.add(sideInputValue);
       }
-      return new HasExpectedOutput(inputs, sideInputs, sideOutput, ordered);
+      return new HasExpectedOutput(inputs, sideInputs, additionalOutput, ordered);
     }
 
-    public HasExpectedOutput fromSideOutput(TupleTag<String> sideOutputTag) {
-      return fromSideOutput(sideOutputTag.getId());
+    public HasExpectedOutput fromOutput(TupleTag<String> outputTag) {
+      return fromOutput(outputTag.getId());
     }
-    public HasExpectedOutput fromSideOutput(String sideOutput) {
-      return new HasExpectedOutput(inputs, sideInputs, sideOutput, ordered);
+    public HasExpectedOutput fromOutput(String outputId) {
+      return new HasExpectedOutput(inputs, sideInputs, outputId, ordered);
     }
 
     public HasExpectedOutput inOrder() {
-      return new HasExpectedOutput(inputs, sideInputs, sideOutput, true);
+      return new HasExpectedOutput(inputs, sideInputs, additionalOutput, true);
     }
 
     @Override
@@ -1174,17 +1174,17 @@ public class ParDoTest implements Serializable {
         sideInputsSuffix = ": " + sideInputs;
       }
 
-      String sideOutputPrefix;
-      if (sideOutput.isEmpty()) {
-        sideOutputPrefix = "";
+      String additionalOutputPrefix;
+      if (additionalOutput.isEmpty()) {
+        additionalOutputPrefix = "";
       } else {
-        sideOutputPrefix = sideOutput + ": ";
+        additionalOutputPrefix = additionalOutput + ": ";
       }
 
       List<String> expectedProcesseds = new ArrayList<>();
       for (Integer input : inputs) {
         expectedProcesseds.add(
-            sideOutputPrefix + "processing: " + input + sideInputsSuffix);
+            additionalOutputPrefix + "processing: " + input + sideInputsSuffix);
       }
       String[] expectedProcessedsArray =
           expectedProcesseds.toArray(new String[expectedProcesseds.size()]);
@@ -1196,10 +1196,10 @@ public class ParDoTest implements Serializable {
 
       assertEquals(starteds.size(), finisheds.size());
       for (String started : starteds) {
-        assertEquals(sideOutputPrefix + "started", started);
+        assertEquals(additionalOutputPrefix + "started", started);
       }
       for (String finished : finisheds) {
-        assertEquals(sideOutputPrefix + "finished", finished);
+        assertEquals(additionalOutputPrefix + "finished", finished);
       }
 
       return null;
@@ -1208,15 +1208,15 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category(NeedsRunner.class)
-  public void testSideOutputUnknownCoder() throws Exception {
+  public void testTaggedOutputUnknownCoder() throws Exception {
 
     PCollection<Integer> input = pipeline
         .apply(Create.of(Arrays.asList(1, 2, 3)));
 
     final TupleTag<Integer> mainOutputTag = new TupleTag<Integer>("main");
-    final TupleTag<TestDummy> sideOutputTag = new TupleTag<TestDummy>("unknownSide");
-    input.apply(ParDo.of(new SideOutputDummyFn(sideOutputTag))
-        .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+    final TupleTag<TestDummy> additionalOutputTag = new TupleTag<TestDummy>("unknownSide");
+    input.apply(ParDo.of(new TaggedOutputDummyFn(additionalOutputTag))
+        .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     thrown.expect(IllegalStateException.class);
     thrown.expectMessage("Unable to return a default Coder");
@@ -1224,26 +1224,27 @@ public class ParDoTest implements Serializable {
   }
 
   @Test
-  public void testSideOutputUnregisteredExplicitCoder() throws Exception {
+  public void testTaggedOutputUnregisteredExplicitCoder() throws Exception {
     pipeline.enableAbandonedNodeEnforcement(false);
 
     PCollection<Integer> input = pipeline
         .apply(Create.of(Arrays.asList(1, 2, 3)));
 
     final TupleTag<Integer> mainOutputTag = new TupleTag<Integer>("main");
-    final TupleTag<TestDummy> sideOutputTag = new TupleTag<TestDummy>("unregisteredSide");
-    ParDo.MultiOutput<Integer, Integer> pardo = ParDo.of(new SideOutputDummyFn(sideOutputTag))
-        .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag));
+    final TupleTag<TestDummy> additionalOutputTag = new TupleTag<TestDummy>("unregisteredSide");
+    ParDo.MultiOutput<Integer, Integer> pardo =
+        ParDo.of(new TaggedOutputDummyFn(additionalOutputTag))
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag));
     PCollectionTuple outputTuple = input.apply(pardo);
 
-    outputTuple.get(sideOutputTag).setCoder(new TestDummyCoder());
+    outputTuple.get(additionalOutputTag).setCoder(new TestDummyCoder());
 
-    outputTuple.get(sideOutputTag).apply(View.<TestDummy>asSingleton());
+    outputTuple.get(additionalOutputTag).apply(View.<TestDummy>asSingleton());
 
-    assertEquals(new TestDummyCoder(), outputTuple.get(sideOutputTag).getCoder());
-    outputTuple.get(sideOutputTag).finishSpecifyingOutput(input, pardo); // Check for crashes
+    assertEquals(new TestDummyCoder(), outputTuple.get(additionalOutputTag).getCoder());
+    outputTuple.get(additionalOutputTag).finishSpecifyingOutput(input, pardo); // Check for crashes
     assertEquals(new TestDummyCoder(),
-        outputTuple.get(sideOutputTag).getCoder()); // Check for corruption
+        outputTuple.get(additionalOutputTag).getCoder()); // Check for corruption
   }
 
   @Test
@@ -1254,9 +1255,11 @@ public class ParDoTest implements Serializable {
         .apply(Create.of(Arrays.asList(1, 2, 3)));
 
     final TupleTag<TestDummy> mainOutputTag = new TupleTag<TestDummy>("unregisteredMain");
-    final TupleTag<Integer> sideOutputTag = new TupleTag<Integer>("side") {};
-    PCollectionTuple outputTuple = input.apply(ParDo.of(new MainOutputDummyFn(sideOutputTag))
-        .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+    final TupleTag<Integer> additionalOutputTag = new TupleTag<Integer>("additionalOutput") {};
+    PCollectionTuple outputTuple =
+        input.apply(
+            ParDo.of(new MainOutputDummyFn(additionalOutputTag))
+                .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     outputTuple.get(mainOutputTag).setCoder(new TestDummyCoder());
 
@@ -1265,13 +1268,13 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category(NeedsRunner.class)
-  public void testMainOutputApplySideOutputNoCoder() {
+  public void testMainOutputApplyTaggedOutputNoCoder() {
     // Regression test: applying a transform to the main output
     // should not cause a crash based on lack of a coder for the
-    // side output.
+    // additional output.
 
     final TupleTag<TestDummy> mainOutputTag = new TupleTag<TestDummy>("main");
-    final TupleTag<TestDummy> sideOutputTag = new TupleTag<TestDummy>("side");
+    final TupleTag<TestDummy> additionalOutputTag = new TupleTag<TestDummy>("additionalOutput");
     PCollectionTuple tuple = pipeline
         .apply(Create.of(new TestDummy())
             .withCoder(TestDummyCoder.of()))
@@ -1282,14 +1285,14 @@ public class ParDoTest implements Serializable {
                   public void processElement(ProcessContext context) {
                     TestDummy element = context.element();
                     context.output(element);
-                    context.sideOutput(sideOutputTag, element);
+                    context.output(additionalOutputTag, element);
                   }
                 })
-            .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag))
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag))
         );
 
     // Before fix, tuple.get(mainOutputTag).apply(...) would indirectly trigger
-    // tuple.get(sideOutputTag).finishSpecifyingOutput(), which would crash
+    // tuple.get(additionalOutputTag).finishSpecifyingOutput(), which would crash
     // on a missing coder.
     tuple.get(mainOutputTag)
         .setCoder(TestDummyCoder.of())
@@ -1300,7 +1303,7 @@ public class ParDoTest implements Serializable {
           }
         }));
 
-    tuple.get(sideOutputTag).setCoder(TestDummyCoder.of());
+    tuple.get(additionalOutputTag).setCoder(TestDummyCoder.of());
 
     pipeline.run();
   }
@@ -1328,13 +1331,13 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category(NeedsRunner.class)
-  public void testParDoSideOutputWithTimestamp() {
+  public void testParDoTaggedOutputWithTimestamp() {
 
     PCollection<Integer> input =
         pipeline.apply(Create.of(Arrays.asList(3, 42, 6)));
 
     final TupleTag<Integer> mainOutputTag = new TupleTag<Integer>("main"){};
-    final TupleTag<Integer> sideOutputTag = new TupleTag<Integer>("side"){};
+    final TupleTag<Integer> additionalOutputTag = new TupleTag<Integer>("additional"){};
 
     PCollection<String> output =
         input
@@ -1342,11 +1345,11 @@ public class ParDoTest implements Serializable {
             new DoFn<Integer, Integer>() {
               @ProcessElement
               public void processElement(ProcessContext c) {
-                c.sideOutputWithTimestamp(
-                    sideOutputTag, c.element(), new Instant(c.element().longValue()));
+                c.outputWithTimestamp(
+                    additionalOutputTag, c.element(), new Instant(c.element().longValue()));
               }
-            }).withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)))
-        .get(sideOutputTag)
+            }).withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)))
+        .get(additionalOutputTag)
         .apply(ParDo.of(new TestShiftTimestampDoFn<Integer>(Duration.ZERO, Duration.ZERO)))
         .apply(ParDo.of(new TestFormatTimestampDoFn<Integer>()));
 
@@ -1914,7 +1917,7 @@ public class ParDoTest implements Serializable {
 
   @Test
   @Category({ValidatesRunner.class, UsesStatefulParDo.class})
-  public void testValueStateSideOutput() {
+  public void testValueStateTaggedOutput() {
     final String stateId = "foo";
 
     final TupleTag<Integer> evenTag = new TupleTag<Integer>() {};
@@ -1934,7 +1937,7 @@ public class ParDoTest implements Serializable {
             if (currentValue % 2 == 0) {
               c.output(currentValue);
             } else {
-              c.sideOutput(oddTag, currentValue);
+              c.output(oddTag, currentValue);
             }
             state.write(currentValue + 1);
           }

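For reference while reading the hunks above: this commit retires ProcessContext.sideOutput and sideOutputWithTimestamp in favor of output(tag, value) and outputWithTimestamp(tag, value, timestamp) overloads, and renames the "side output" vocabulary to "additional/tagged output" throughout. A minimal sketch of the post-rename usage (the demo class and method names are illustrative, not from the commit):

import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;

class TaggedOutputDemo {
  // Emits each element on the main output and its string form on a tagged
  // additional output, using the renamed output(tag, value) overload.
  static PCollection<String> stringsFrom(PCollection<Integer> numbers) {
    final TupleTag<Integer> mainTag = new TupleTag<Integer>() {};
    final TupleTag<String> stringTag = new TupleTag<String>() {};
    PCollectionTuple results =
        numbers.apply(
            ParDo.of(
                    new DoFn<Integer, Integer>() {
                      @ProcessElement
                      public void processElement(ProcessContext c) {
                        c.output(c.element());                        // main output
                        c.output(stringTag, c.element().toString());  // additional output
                      }
                    })
                .withOutputTags(mainTag, TupleTagList.of(stringTag)));
    return results.get(stringTag);
  }
}
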
http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SplittableDoFnTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SplittableDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SplittableDoFnTest.java
index a122f67..9e8c12e 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SplittableDoFnTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SplittableDoFnTest.java
@@ -218,12 +218,12 @@ public class SplittableDoFnTest {
 
   private static class SDFWithSideInputsAndOutputs extends DoFn<Integer, String> {
     private final PCollectionView<String> sideInput;
-    private final TupleTag<String> sideOutput;
+    private final TupleTag<String> additionalOutput;
 
     private SDFWithSideInputsAndOutputs(
-        PCollectionView<String> sideInput, TupleTag<String> sideOutput) {
+        PCollectionView<String> sideInput, TupleTag<String> additionalOutput) {
       this.sideInput = sideInput;
-      this.sideOutput = sideOutput;
+      this.additionalOutput = additionalOutput;
     }
 
     @ProcessElement
@@ -231,7 +231,7 @@ public class SplittableDoFnTest {
       checkState(tracker.tryClaim(tracker.currentRestriction().getFrom()));
       String side = c.sideInput(sideInput);
       c.output("main:" + side + ":" + c.element());
-      c.sideOutput(sideOutput, "side:" + side + ":" + c.element());
+      c.output(additionalOutput, "additional:" + side + ":" + c.element());
     }
 
     @GetInitialRestriction
@@ -247,21 +247,22 @@ public class SplittableDoFnTest {
     PCollectionView<String> sideInput =
         p.apply("side input", Create.of("foo")).apply(View.<String>asSingleton());
     TupleTag<String> mainOutputTag = new TupleTag<>("main");
-    TupleTag<String> sideOutputTag = new TupleTag<>("side");
+    TupleTag<String> additionalOutputTag = new TupleTag<>("additional");
 
     PCollectionTuple res =
         p.apply("input", Create.of(0, 1, 2))
             .apply(
-                ParDo.of(new SDFWithSideInputsAndOutputs(sideInput, sideOutputTag))
+                ParDo.of(new SDFWithSideInputsAndOutputs(sideInput, additionalOutputTag))
                     .withSideInputs(sideInput)
-                    .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+                    .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
     res.get(mainOutputTag).setCoder(StringUtf8Coder.of());
-    res.get(sideOutputTag).setCoder(StringUtf8Coder.of());
+    res.get(additionalOutputTag).setCoder(StringUtf8Coder.of());
 
     PAssert.that(res.get(mainOutputTag))
         .containsInAnyOrder(Arrays.asList("main:foo:0", "main:foo:1", "main:foo:2"));
-    PAssert.that(res.get(sideOutputTag))
-        .containsInAnyOrder(Arrays.asList("side:foo:0", "side:foo:1", "side:foo:2"));
+    PAssert.that(res.get(additionalOutputTag))
+        .containsInAnyOrder(
+            Arrays.asList("additional:foo:0", "additional:foo:1", "additional:foo:2"));
 
     p.run();
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
index 0a0abd6..9df0512 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java
@@ -82,7 +82,7 @@ public final class PCollectionTupleTest implements Serializable {
 
     TupleTag<Integer> mainOutputTag = new TupleTag<Integer>("main") {};
     TupleTag<Integer> emptyOutputTag = new TupleTag<Integer>("empty") {};
-    final TupleTag<Integer> sideOutputTag = new TupleTag<Integer>("side") {};
+    final TupleTag<Integer> additionalOutputTag = new TupleTag<Integer>("extra") {};
 
     PCollection<Integer> mainInput = pipeline
         .apply(Create.of(inputs));
@@ -91,14 +91,14 @@ public final class PCollectionTupleTest implements Serializable {
         .of(new DoFn<Integer, Integer>() {
           @ProcessElement
           public void processElement(ProcessContext c) {
-            c.sideOutput(sideOutputTag, c.element());
+            c.output(additionalOutputTag, c.element());
           }})
-        .withOutputTags(emptyOutputTag, TupleTagList.of(sideOutputTag)));
+        .withOutputTags(emptyOutputTag, TupleTagList.of(additionalOutputTag)));
     assertNotNull("outputs.getPipeline()", outputs.getPipeline());
     outputs = outputs.and(mainOutputTag, mainInput);
 
     PAssert.that(outputs.get(mainOutputTag)).containsInAnyOrder(inputs);
-    PAssert.that(outputs.get(sideOutputTag)).containsInAnyOrder(inputs);
+    PAssert.that(outputs.get(additionalOutputTag)).containsInAnyOrder(inputs);
     PAssert.that(outputs.get(emptyOutputTag)).empty();
 
     pipeline.run();


[38/50] [abbrv] beam git commit: This closes #2560

Posted by ke...@apache.org.
This closes #2560


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/36e43558
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/36e43558
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/36e43558

Branch: refs/heads/jstorm-runner
Commit: 36e43558a2849204b4a9d077c542fa25e75c3fcb
Parents: 9b0cc98 3bbdbce
Author: Ahmet Altay <al...@google.com>
Authored: Mon Apr 17 18:41:27 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Mon Apr 17 18:41:27 2017 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/utils/pipeline_options.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------



[45/50] [abbrv] beam git commit: Enable flink dependency enforcement and make dependencies explicit

Posted by ke...@apache.org.
Enable flink dependency enforcement and make dependencies explicit


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f654ff47
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f654ff47
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f654ff47

Branch: refs/heads/jstorm-runner
Commit: f654ff471c52f6bba7fb34ac7b8b4f5768af17c0
Parents: e556858
Author: Ismaël Mejía <ie...@apache.org>
Authored: Sun Apr 16 18:02:12 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 16:12:47 2017 +0200

----------------------------------------------------------------------
 runners/flink/pom.xml                           |  45 +++++---
 runners/flink/runner/pom.xml                    | 115 +++++++++++--------
 .../flink/FlinkDetachedRunnerResult.java        |   3 +-
 .../beam/runners/flink/PipelineOptionsTest.java |   2 +-
 4 files changed, 101 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/f654ff47/runners/flink/pom.xml
----------------------------------------------------------------------
diff --git a/runners/flink/pom.xml b/runners/flink/pom.xml
index dcf5ff7..a5c5ea0 100644
--- a/runners/flink/pom.xml
+++ b/runners/flink/pom.xml
@@ -42,20 +42,6 @@
     <flink.version>1.2.0</flink.version>
   </properties>
 
-  <repositories>
-    <repository>
-      <id>apache.snapshots</id>
-      <name>Apache Development Snapshot Repository</name>
-      <url>https://repository.apache.org/content/repositories/snapshots/</url>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-    </repository>
-  </repositories>
-
   <build>
     <pluginManagement>
       <plugins>
@@ -95,10 +81,37 @@
             </execution>
           </executions>
         </plugin>
-
       </plugins>
     </pluginManagement>
-
   </build>
 
+  <dependencies>
+    <!-- Flink dependencies -->
+    <dependency>
+      <groupId>org.apache.flink</groupId>
+      <artifactId>flink-streaming-java_2.10</artifactId>
+      <version>${flink.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.flink</groupId>
+      <artifactId>flink-core</artifactId>
+      <version>${flink.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.beam</groupId>
+      <artifactId>beam-sdks-java-core</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>joda-time</groupId>
+      <artifactId>joda-time</artifactId>
+    </dependency>
+  </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/beam/blob/f654ff47/runners/flink/runner/pom.xml
----------------------------------------------------------------------
diff --git a/runners/flink/runner/pom.xml b/runners/flink/runner/pom.xml
index 70cd246..30f376c 100644
--- a/runners/flink/runner/pom.xml
+++ b/runners/flink/runner/pom.xml
@@ -115,21 +115,54 @@
     </profile>
   </profiles>
 
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+      </plugin>
+
+      <!-- Integration Tests -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+      </plugin>
+
+      <!-- Unit Tests -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
   <dependencies>
     <!-- Flink dependencies -->
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-streaming-java_2.10</artifactId>
+      <artifactId>flink-java</artifactId>
       <version>${flink.version}</version>
     </dependency>
+
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-java</artifactId>
+      <artifactId>flink-clients_2.10</artifactId>
       <version>${flink.version}</version>
     </dependency>
+
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-clients_2.10</artifactId>
+      <artifactId>flink-runtime_2.10</artifactId>
+      <version>${flink.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.flink</groupId>
+      <artifactId>flink-annotations</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
@@ -150,7 +183,6 @@
       <scope>test</scope>
     </dependency>
 
-
     <!-- Beam -->
     <dependency>
       <groupId>org.apache.beam</groupId>
@@ -185,6 +217,26 @@
       </exclusions>
     </dependency>
 
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.code.findbugs</groupId>
+      <artifactId>jsr305</artifactId>
+    </dependency>
+
     <!--
     Force an upgrade on the version of Apache Commons from Flink to support DEFLATE compression.
     -->
@@ -192,20 +244,34 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-compress</artifactId>
       <version>[1.9,)</version>
+      <scope>runtime</scope>
     </dependency>
 
     <!-- Test scoped -->
+    <dependency>
+      <groupId>com.google.apis</groupId>
+      <artifactId>google-api-services-bigquery</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <scope>test</scope>
+    </dependency>
 
     <dependency>
       <groupId>org.hamcrest</groupId>
       <artifactId>hamcrest-all</artifactId>
       <scope>test</scope>
     </dependency>
+
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
@@ -267,45 +333,4 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals><goal>analyze-only</goal></goals>
-            <configuration>
-              <!-- disable for now until dependencies are cleaned up -->
-              <failOnWarning>false</failOnWarning>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <!-- Integration Tests -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-      </plugin>
-
-      <!-- Unit Tests -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-      </plugin>
-
-    </plugins>
-  </build>
-
 </project>

http://git-wip-us.apache.org/repos/asf/beam/blob/f654ff47/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkDetachedRunnerResult.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkDetachedRunnerResult.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkDetachedRunnerResult.java
index 6adcf07..bf4395f 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkDetachedRunnerResult.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkDetachedRunnerResult.java
@@ -24,7 +24,6 @@ import org.apache.beam.sdk.AggregatorValues;
 import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.metrics.MetricResults;
 import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.Duration;
 
 
@@ -46,7 +45,7 @@ public class FlinkDetachedRunnerResult implements PipelineResult {
       throws AggregatorRetrievalException {
     throw new AggregatorRetrievalException(
         "Accumulators can't be retrieved for detached Job executions.",
-        new NotImplementedException());
+        new UnsupportedOperationException());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f654ff47/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
index 2cb3dd3..06187f6 100644
--- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
+++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java
@@ -38,7 +38,7 @@ import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.commons.lang.SerializationUtils;
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.typeinfo.TypeHint;
 import org.apache.flink.api.common.typeinfo.TypeInformation;

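The last hunk swaps org.apache.commons.lang.SerializationUtils for its commons-lang3 counterpart, matching the new explicit test-scoped commons-lang3 dependency. A minimal sketch of the serialize/deserialize round trip that utility provides (the demo class is illustrative, not from the commit):

import org.apache.commons.lang3.SerializationUtils;

class RoundTripDemo {
  public static void main(String[] args) {
    // Serialize any Serializable to bytes and restore it, the same round trip
    // PipelineOptionsTest relies on when checking options survive serialization.
    String original = "pipeline options payload";
    byte[] bytes = SerializationUtils.serialize(original);
    String restored = SerializationUtils.deserialize(bytes);
    System.out.println(original.equals(restored)); // true
  }
}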

[34/50] [abbrv] beam git commit: This closes #2546

Posted by ke...@apache.org.
This closes #2546


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/a25c7d3c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/a25c7d3c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/a25c7d3c

Branch: refs/heads/jstorm-runner
Commit: a25c7d3cf11275c024fe7515c08ca8539a97a944
Parents: 4ff244d 8330e15
Author: Dan Halperin <dh...@google.com>
Authored: Mon Apr 17 17:40:53 2017 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Mon Apr 17 17:40:53 2017 -0700

----------------------------------------------------------------------
 sdks/java/io/hadoop/pom.xml | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------



[10/50] [abbrv] beam git commit: Update Signature of PTransformOverrideFactory

Posted by ke...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
index 684dc14..4eec6b8 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java
@@ -61,6 +61,7 @@ import java.util.TreeSet;
 import org.apache.beam.runners.core.construction.DeduplicatedFlattenFactory;
 import org.apache.beam.runners.core.construction.EmptyFlattenAsCreateFactory;
 import org.apache.beam.runners.core.construction.PTransformMatchers;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.ReplacementOutputs;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.runners.core.construction.UnboundedReadFromBoundedSource;
@@ -96,6 +97,7 @@ import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.runners.TransformHierarchy;
 import org.apache.beam.sdk.runners.TransformHierarchy.Node;
 import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.GroupedValues;
 import org.apache.beam.sdk.transforms.DoFn;
@@ -390,25 +392,29 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
   }
 
   private static class ReflectiveOneToOneOverrideFactory<
-          InputT extends PValue,
-          OutputT extends PValue,
-          TransformT extends PTransform<InputT, OutputT>>
-      extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
-    private final Class<PTransform<InputT, OutputT>> replacement;
+          InputT, OutputT, TransformT extends PTransform<PCollection<InputT>, PCollection<OutputT>>>
+      extends SingleInputOutputOverrideFactory<
+          PCollection<InputT>, PCollection<OutputT>, TransformT> {
+    private final Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement;
     private final DataflowRunner runner;
 
     private ReflectiveOneToOneOverrideFactory(
-        Class<PTransform<InputT, OutputT>> replacement, DataflowRunner runner) {
+        Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement,
+        DataflowRunner runner) {
       this.replacement = replacement;
       this.runner = runner;
     }
 
     @Override
-    public PTransform<InputT, OutputT> getReplacementTransform(TransformT transform) {
-      return InstanceBuilder.ofType(replacement)
-          .withArg(DataflowRunner.class, runner)
-          .withArg((Class<PTransform<InputT, OutputT>>) transform.getClass(), transform)
-          .build();
+    public PTransformReplacement<PCollection<InputT>, PCollection<OutputT>> getReplacementTransform(
+        AppliedPTransform<PCollection<InputT>, PCollection<OutputT>, TransformT> transform) {
+      PTransform<PCollection<InputT>, PCollection<OutputT>> rep =
+          InstanceBuilder.ofType(replacement)
+              .withArg(DataflowRunner.class, runner)
+              .withArg(
+                  (Class<TransformT>) transform.getTransform().getClass(), transform.getTransform())
+              .build();
+      return PTransformReplacement.of(PTransformReplacements.getSingletonMainInput(transform), rep);
     }
   }
 
@@ -423,19 +429,18 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
       this.replacement = replacement;
       this.runner = runner;
     }
-    @Override
-    public PTransform<PBegin, PCollection<T>> getReplacementTransform(
-        PTransform<PInput, PCollection<T>> transform) {
-      return InstanceBuilder.ofType(replacement)
-          .withArg(DataflowRunner.class, runner)
-          .withArg(
-              (Class<? super PTransform<PInput, PCollection<T>>>) transform.getClass(), transform)
-          .build();
-    }
 
     @Override
-    public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return p.begin();
+    public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
+        AppliedPTransform<PBegin, PCollection<T>, PTransform<PInput, PCollection<T>>> transform) {
+      PTransform<PInput, PCollection<T>> original = transform.getTransform();
+      return PTransformReplacement.of(
+          transform.getPipeline().begin(),
+          InstanceBuilder.ofType(replacement)
+              .withArg(DataflowRunner.class, runner)
+              .withArg(
+                  (Class<? super PTransform<PInput, PCollection<T>>>) original.getClass(), original)
+              .build());
     }
 
     @Override
@@ -805,13 +810,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     @Override
-    public PTransform<PCollection<T>, PDone> getReplacementTransform(Write<T> transform) {
-      return new BatchWrite<>(runner, transform);
-    }
-
-    @Override
-    public PCollection<T> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<T>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<T>, PDone> getReplacementTransform(
+        AppliedPTransform<PCollection<T>, PDone, Write<T>> transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new BatchWrite<>(runner, transform.getTransform()));
     }
 
     @Override
@@ -1295,15 +1298,15 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
           PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
           Combine.GroupedValues<K, InputT, OutputT>> {
     @Override
-    public PTransform<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>>
-        getReplacementTransform(GroupedValues<K, InputT, OutputT> transform) {
-      return new CombineGroupedValues<>(transform);
-    }
-
-    @Override
-    public PCollection<KV<K, Iterable<InputT>>> getInput(
-        Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<KV<K, Iterable<InputT>>>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<KV<K, Iterable<InputT>>>, PCollection<KV<K, OutputT>>,
+                    GroupedValues<K, InputT, OutputT>>
+                transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new CombineGroupedValues<>(transform.getTransform()));
     }
 
     @Override
@@ -1322,14 +1325,11 @@ public class DataflowRunner extends PipelineRunner<DataflowPipelineJob> {
     }
 
     @Override
-    public PTransform<PCollection<T>, PDone> getReplacementTransform(
-        PubsubUnboundedSink<T> transform) {
-      return new StreamingPubsubIOWrite<>(runner, transform);
-    }
-
-    @Override
-    public PCollection<T> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<T>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<T>, PDone> getReplacementTransform(
+        AppliedPTransform<PCollection<T>, PDone, PubsubUnboundedSink<T>> transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new StreamingPubsubIOWrite<>(runner, transform.getTransform()));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java
index db50cc2..2e50cb5 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactory.java
@@ -20,12 +20,15 @@ package org.apache.beam.runners.dataflow;
 
 import java.util.List;
 import org.apache.beam.runners.core.construction.ForwardingPTransform;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.sdk.common.runner.v1.RunnerApi.DisplayData;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.ParDo.SingleOutput;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 
@@ -38,9 +41,15 @@ public class PrimitiveParDoSingleFactory<InputT, OutputT>
     extends SingleInputOutputOverrideFactory<
         PCollection<? extends InputT>, PCollection<OutputT>, ParDo.SingleOutput<InputT, OutputT>> {
   @Override
-  public PTransform<PCollection<? extends InputT>, PCollection<OutputT>> getReplacementTransform(
-      ParDo.SingleOutput<InputT, OutputT> transform) {
-    return new ParDoSingle<>(transform);
+  public PTransformReplacement<PCollection<? extends InputT>, PCollection<OutputT>>
+      getReplacementTransform(
+          AppliedPTransform<
+                  PCollection<? extends InputT>, PCollection<OutputT>,
+                  SingleOutput<InputT, OutputT>>
+              transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        new ParDoSingle<>(transform.getTransform()));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/ReshuffleOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/ReshuffleOverrideFactory.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/ReshuffleOverrideFactory.java
index 2e6455d..aa9d9f8 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/ReshuffleOverrideFactory.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/ReshuffleOverrideFactory.java
@@ -18,8 +18,10 @@
 
 package org.apache.beam.runners.dataflow;
 
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -43,9 +45,13 @@ class ReshuffleOverrideFactory<K, V>
     extends SingleInputOutputOverrideFactory<
         PCollection<KV<K, V>>, PCollection<KV<K, V>>, Reshuffle<K, V>> {
   @Override
-  public PTransform<PCollection<KV<K, V>>, PCollection<KV<K, V>>> getReplacementTransform(
-      Reshuffle<K, V> transform) {
-    return new ReshuffleWithOnlyTrigger<>();
+  public PTransformReplacement<PCollection<KV<K, V>>, PCollection<KV<K, V>>>
+      getReplacementTransform(
+          AppliedPTransform<PCollection<KV<K, V>>, PCollection<KV<K, V>>, Reshuffle<K, V>>
+              transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        new ReshuffleWithOnlyTrigger<K, V>());
   }
 
   private static class ReshuffleWithOnlyTrigger<K, V>

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/StreamingViewOverrides.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/StreamingViewOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/StreamingViewOverrides.java
index c407517..eb385de 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/StreamingViewOverrides.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/StreamingViewOverrides.java
@@ -20,11 +20,13 @@ package org.apache.beam.runners.dataflow;
 
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.runners.dataflow.DataflowRunner.StreamingPCollectionViewWriterFn;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderRegistry;
 import org.apache.beam.sdk.coders.ListCoder;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -42,9 +44,15 @@ class StreamingViewOverrides {
       extends SingleInputOutputOverrideFactory<
           PCollection<ElemT>, PCollectionView<ViewT>, CreatePCollectionView<ElemT, ViewT>> {
     @Override
-    public PTransform<PCollection<ElemT>, PCollectionView<ViewT>> getReplacementTransform(
-        final CreatePCollectionView<ElemT, ViewT> transform) {
-      return new StreamingCreatePCollectionView<>(transform.getView());
+    public PTransformReplacement<PCollection<ElemT>, PCollectionView<ViewT>>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<ElemT>, PCollectionView<ViewT>, CreatePCollectionView<ElemT, ViewT>>
+                transform) {
+      StreamingCreatePCollectionView<ElemT, ViewT> streamingView =
+          new StreamingCreatePCollectionView<>(transform.getTransform().getView());
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform), streamingView);
     }
 
     private static class StreamingCreatePCollectionView<ElemT, ViewT>

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactoryTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactoryTest.java
index bff46ea..e320036 100644
--- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactoryTest.java
+++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/PrimitiveParDoSingleFactoryTest.java
@@ -27,10 +27,11 @@ import java.io.Serializable;
 import java.util.List;
 import org.apache.beam.runners.dataflow.PrimitiveParDoSingleFactory.ParDoSingle;
 import org.apache.beam.sdk.coders.VarIntCoder;
+import org.apache.beam.sdk.runners.PTransformOverrideFactory.PTransformReplacement;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.View;
@@ -64,17 +65,27 @@ public class PrimitiveParDoSingleFactoryTest implements Serializable {
   public void getReplacementTransformPopulateDisplayData() {
     ParDo.SingleOutput<Integer, Long> originalTransform = ParDo.of(new ToLongFn());
     DisplayData originalDisplayData = DisplayData.from(originalTransform);
-
-    PTransform<PCollection<? extends Integer>, PCollection<Long>> replacement =
-        factory.getReplacementTransform(originalTransform);
-    DisplayData replacementDisplayData = DisplayData.from(replacement);
+    PCollection<? extends Integer> input = pipeline.apply(Create.of(1, 2, 3));
+    AppliedPTransform<
+        PCollection<? extends Integer>, PCollection<Long>, ParDo.SingleOutput<Integer, Long>>
+        application =
+        AppliedPTransform.of(
+            "original",
+            input.expand(),
+            input.apply(originalTransform).expand(),
+            originalTransform,
+            pipeline);
+
+    PTransformReplacement<PCollection<? extends Integer>, PCollection<Long>> replacement =
+        factory.getReplacementTransform(application);
+    DisplayData replacementDisplayData = DisplayData.from(replacement.getTransform());
 
     assertThat(replacementDisplayData, equalTo(originalDisplayData));
 
     DisplayData primitiveDisplayData =
         Iterables.getOnlyElement(
             DisplayDataEvaluator.create()
-                .displayDataForPrimitiveTransforms(replacement, VarIntCoder.of()));
+                .displayDataForPrimitiveTransforms(replacement.getTransform(), VarIntCoder.of()));
     assertThat(primitiveDisplayData, equalTo(replacementDisplayData));
   }
 
@@ -91,9 +102,21 @@ public class PrimitiveParDoSingleFactoryTest implements Serializable {
     ParDo.SingleOutput<Integer, Long> originalTransform =
         ParDo.of(new ToLongFn()).withSideInputs(sideLong, sideStrings);
 
-    PTransform<PCollection<? extends Integer>, PCollection<Long>> replacementTransform =
-        factory.getReplacementTransform(originalTransform);
-    ParDoSingle<Integer, Long> parDoSingle = (ParDoSingle<Integer, Long>) replacementTransform;
+    PCollection<? extends Integer> input = pipeline.apply(Create.of(1, 2, 3));
+    AppliedPTransform<
+        PCollection<? extends Integer>, PCollection<Long>, ParDo.SingleOutput<Integer, Long>>
+        application =
+        AppliedPTransform.of(
+            "original",
+            input.expand(),
+            input.apply(originalTransform).expand(),
+            originalTransform,
+            pipeline);
+
+    PTransformReplacement<PCollection<? extends Integer>, PCollection<Long>> replacementTransform =
+        factory.getReplacementTransform(application);
+    ParDoSingle<Integer, Long> parDoSingle =
+        (ParDoSingle<Integer, Long>) replacementTransform.getTransform();
     assertThat(parDoSingle.getSideInputs(), containsInAnyOrder(sideStrings, sideLong));
   }
 
@@ -101,9 +124,21 @@ public class PrimitiveParDoSingleFactoryTest implements Serializable {
   public void getReplacementTransformGetFn() {
     DoFn<Integer, Long> originalFn = new ToLongFn();
     ParDo.SingleOutput<Integer, Long> originalTransform = ParDo.of(originalFn);
-    PTransform<PCollection<? extends Integer>, PCollection<Long>> replacementTransform =
-        factory.getReplacementTransform(originalTransform);
-    ParDoSingle<Integer, Long> parDoSingle = (ParDoSingle<Integer, Long>) replacementTransform;
+    PCollection<? extends Integer> input = pipeline.apply(Create.of(1, 2, 3));
+    AppliedPTransform<
+            PCollection<? extends Integer>, PCollection<Long>, ParDo.SingleOutput<Integer, Long>>
+        application =
+            AppliedPTransform.of(
+                "original",
+                input.expand(),
+                input.apply(originalTransform).expand(),
+                originalTransform,
+                pipeline);
+
+    PTransformReplacement<PCollection<? extends Integer>, PCollection<Long>> replacementTransform =
+        factory.getReplacementTransform(application);
+    ParDoSingle<Integer, Long> parDoSingle =
+        (ParDoSingle<Integer, Long>) replacementTransform.getTransform();
 
     assertThat(parDoSingle.getFn(), equalTo(originalTransform.getFn()));
     assertThat(parDoSingle.getFn(), equalTo(originalFn));

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
index aacb942..61fcaa9 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/TestSparkRunner.java
@@ -46,6 +46,7 @@ import org.apache.beam.sdk.runners.PTransformOverride;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.util.ValueWithRecordId;
@@ -244,14 +245,11 @@ public final class TestSparkRunner extends PipelineRunner<SparkPipelineResult> {
         implements PTransformOverrideFactory<
             PBegin, PCollection<T>, BoundedReadFromUnboundedSource<T>> {
       @Override
-      public PTransform<PBegin, PCollection<T>> getReplacementTransform(
-          BoundedReadFromUnboundedSource<T> transform) {
-        return new AdaptedBoundedAsUnbounded<>(transform);
-      }
-
-      @Override
-      public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-        return p.begin();
+      public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
+          AppliedPTransform<PBegin, PCollection<T>, BoundedReadFromUnboundedSource<T>> transform) {
+        return PTransformReplacement.of(
+            transform.getPipeline().begin(),
+            new AdaptedBoundedAsUnbounded<T>(transform.getTransform()));
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
index 791166e..1ff4c30 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java
@@ -33,11 +33,13 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.runners.PTransformOverride;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.runners.PTransformOverrideFactory.PTransformReplacement;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput;
 import org.apache.beam.sdk.runners.PipelineRunner;
 import org.apache.beam.sdk.runners.TransformHierarchy;
 import org.apache.beam.sdk.runners.TransformHierarchy.Node;
 import org.apache.beam.sdk.transforms.Aggregator;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.util.UserCodeException;
@@ -497,17 +499,18 @@ public class Pipeline {
       void applyReplacement(
           Node original,
           PTransformOverrideFactory<InputT, OutputT, TransformT> replacementFactory) {
-    PTransform<InputT, OutputT> replacement =
-        replacementFactory.getReplacementTransform((TransformT) original.getTransform());
-    if (replacement == original.getTransform()) {
+    PTransformReplacement<InputT, OutputT> replacement =
+        replacementFactory.getReplacementTransform(
+            (AppliedPTransform<InputT, OutputT, TransformT>) original.toAppliedPTransform());
+    if (replacement.getTransform() == original.getTransform()) {
       return;
     }
-    InputT originalInput = replacementFactory.getInput(original.getInputs(), this);
+    InputT originalInput = replacement.getInput();
 
     LOG.debug("Replacing {} with {}", original, replacement);
-    transforms.replaceNode(original, originalInput, replacement);
+    transforms.replaceNode(original, originalInput, replacement.getTransform());
     try {
-      OutputT newOutput = replacement.expand(originalInput);
+      OutputT newOutput = replacement.getTransform().expand(originalInput);
       Map<PValue, ReplacementOutput> originalToReplacement =
           replacementFactory.mapOutputs(original.getOutputs(), newOutput);
       // Ensure the internal TransformHierarchy data structures are consistent.

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverrideFactory.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverrideFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverrideFactory.java
index 57cba50..786c61c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverrideFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverrideFactory.java
@@ -21,9 +21,9 @@ package org.apache.beam.sdk.runners;
 
 import com.google.auto.value.AutoValue;
 import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.annotations.Experimental.Kind;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.PInput;
 import org.apache.beam.sdk.values.POutput;
@@ -41,14 +41,11 @@ public interface PTransformOverrideFactory<
     OutputT extends POutput,
     TransformT extends PTransform<? super InputT, OutputT>> {
   /**
-   * Returns a {@link PTransform} that produces equivalent output to the provided transform.
+   * Returns a {@link PTransform} that produces equivalent output to the provided {@link
+   * AppliedPTransform transform}.
    */
-  PTransform<InputT, OutputT> getReplacementTransform(TransformT transform);
-
-  /**
-   * Returns the composite type that replacement transforms consumed from an equivalent expansion.
-   */
-  InputT getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p);
+  PTransformReplacement<InputT, OutputT> getReplacementTransform(
+      AppliedPTransform<InputT, OutputT, TransformT> transform);
 
   /**
    * Returns a {@link Map} from the expanded values in {@code newOutput} to the values produced by
@@ -56,7 +53,25 @@ public interface PTransformOverrideFactory<
    */
   Map<PValue, ReplacementOutput> mapOutputs(Map<TupleTag<?>, PValue> outputs, OutputT newOutput);
 
-  /** A mapping between original {@link TaggedPValue} outputs and their replacements. */
+  /**
+   * A {@link PTransform} that replaces an {@link AppliedPTransform}, and the input required to
+   * do so. The input must be constructed from the expanded form, as the transform may not have
+   * originally been applied within this process or from within a Java SDK.
+   */
+  @AutoValue
+  abstract class PTransformReplacement<InputT extends PInput, OutputT extends POutput> {
+    public static <InputT extends PInput, OutputT extends POutput>
+        PTransformReplacement<InputT, OutputT> of(
+            InputT input, PTransform<InputT, OutputT> transform) {
+      return new AutoValue_PTransformOverrideFactory_PTransformReplacement(input, transform);
+    }
+    public abstract InputT getInput();
+    public abstract PTransform<InputT, OutputT> getTransform();
+  }
+
+  /**
+   * A mapping between original {@link TaggedPValue} outputs and their replacements.
+   */
   @AutoValue
   abstract class ReplacementOutput {
     public static ReplacementOutput of(TaggedPValue original, TaggedPValue replacement) {
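
For readers migrating factories to this shape, a minimal sketch of an implementation against the new interface. The Create.of(42L) replacement is a made-up stand-in, and ReplacementOutputs.singleton is assumed to be the helper of that name added in runners-core-construction (treat its exact signature as an assumption):

    import java.util.Map;
    import org.apache.beam.runners.core.construction.ReplacementOutputs;
    import org.apache.beam.sdk.runners.PTransformOverrideFactory;
    import org.apache.beam.sdk.transforms.AppliedPTransform;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.values.PBegin;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.PValue;
    import org.apache.beam.sdk.values.TupleTag;

    /** Sketch: swaps any matched Create.Values<Long> for a fixed Create.of(42L). */
    class FixedCreateOverrideFactory
        implements PTransformOverrideFactory<PBegin, PCollection<Long>, Create.Values<Long>> {
      @Override
      public PTransformReplacement<PBegin, PCollection<Long>> getReplacementTransform(
          AppliedPTransform<PBegin, PCollection<Long>, Create.Values<Long>> transform) {
        // Input and replacement now travel together; no separate getInput(...) call.
        return PTransformReplacement.of(transform.getPipeline().begin(), Create.of(42L));
      }

      @Override
      public Map<PValue, ReplacementOutput> mapOutputs(
          Map<TupleTag<?>, PValue> outputs, PCollection<Long> newOutput) {
        // Assumed helper from runners-core-construction; a hand-written
        // one-to-one map of old PValue to ReplacementOutput works the same way.
        return ReplacementOutputs.singleton(outputs, newOutput);
      }
    }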

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java
index 8d99a62..bdb61b8 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java
@@ -31,6 +31,11 @@ import org.apache.beam.sdk.values.TupleTag;
  *
  * <p>For internal use.
  *
+ * <p>Inputs and outputs are stored in their expanded forms, as the condensed form of a composite
+ * {@link PInput} or {@link POutput} is a language-specific concept, and {@link AppliedPTransform}
+ * represents a possibly cross-language transform for which no appropriate composite type exists
+ * in the Java SDK.
+ *
  * @param <InputT>     transform input type
  * @param <OutputT>    transform output type
  * @param <TransformT> transform type
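
Since only the expanded form is stored, a factory that wants its one main input as a typed PCollection has to unpack it itself. A hedged sketch, assuming AppliedPTransform exposes the expanded map via getInputs() (the merge below also adds a PTransformReplacements utility for this kind of extraction):

    import java.util.Map;
    import org.apache.beam.sdk.transforms.AppliedPTransform;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.PValue;
    import org.apache.beam.sdk.values.TupleTag;

    final class Expansions {
      /** Sketch: recover the single input PCollection from the expanded form. */
      @SuppressWarnings("unchecked") // the caller vouches for the element type
      static <T> PCollection<T> singletonMainInput(AppliedPTransform<?, ?, ?> transform) {
        Map<TupleTag<?>, PValue> inputs = transform.getInputs();
        if (inputs.size() != 1) {
          throw new IllegalArgumentException(
              "Expected exactly one input, found " + inputs.size());
        }
        return (PCollection<T>) inputs.values().iterator().next();
      }
    }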

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
index 6ce016d..75cabf2 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java
@@ -406,16 +406,10 @@ public class PipelineTest {
     class ReplacementOverrideFactory
         implements PTransformOverrideFactory<
             PCollection<String>, PCollection<Long>, OriginalTransform> {
-
       @Override
-      public PTransform<PCollection<String>, PCollection<Long>> getReplacementTransform(
-          OriginalTransform transform) {
-        return new ReplacementTransform();
-      }
-
-      @Override
-      public PCollection<String> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-        return originalInput;
+      public PTransformReplacement<PCollection<String>, PCollection<Long>> getReplacementTransform(
+          AppliedPTransform<PCollection<String>, PCollection<Long>, OriginalTransform> transform) {
+        return PTransformReplacement.of(originalInput, new ReplacementTransform());
       }
 
       @Override
@@ -464,14 +458,9 @@ public class PipelineTest {
   static class BoundedCountingInputOverride
       implements PTransformOverrideFactory<PBegin, PCollection<Long>, BoundedCountingInput> {
     @Override
-    public PTransform<PBegin, PCollection<Long>> getReplacementTransform(
-        BoundedCountingInput transform) {
-      return Create.of(0L);
-    }
-
-    @Override
-    public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return p.begin();
+    public PTransformReplacement<PBegin, PCollection<Long>> getReplacementTransform(
+        AppliedPTransform<PBegin, PCollection<Long>, BoundedCountingInput> transform) {
+      return PTransformReplacement.of(transform.getPipeline().begin(), Create.of(0L));
     }
 
     @Override
@@ -489,15 +478,11 @@ public class PipelineTest {
   }
   static class UnboundedCountingInputOverride
       implements PTransformOverrideFactory<PBegin, PCollection<Long>, UnboundedCountingInput> {
-    @Override
-    public PTransform<PBegin, PCollection<Long>> getReplacementTransform(
-        UnboundedCountingInput transform) {
-      return CountingInput.upTo(100L);
-    }
 
     @Override
-    public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return p.begin();
+    public PTransformReplacement<PBegin, PCollection<Long>> getReplacementTransform(
+        AppliedPTransform<PBegin, PCollection<Long>, UnboundedCountingInput> transform) {
+      return PTransformReplacement.of(transform.getPipeline().begin(), CountingInput.upTo(100L));
     }
 
     @Override
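
The diff does not show how these factories get wired in. A hedged sketch of the runner-side registration, assuming the PTransformOverride/replaceAll plumbing this series of commits builds on, and given a Pipeline named pipeline:

    import com.google.common.collect.ImmutableList;
    import org.apache.beam.runners.core.construction.PTransformMatchers;
    import org.apache.beam.sdk.runners.PTransformOverride;

    // Sketch: replace every BoundedCountingInput in the pipeline before running it.
    pipeline.replaceAll(
        ImmutableList.of(
            PTransformOverride.of(
                PTransformMatchers.classEqualTo(BoundedCountingInput.class),
                new BoundedCountingInputOverride())));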


[20/50] [abbrv] beam git commit: [BEAM-1922] This closes #2482

Posted by ke...@apache.org.
[BEAM-1922] This closes #2482


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/588a4d00
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/588a4d00
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/588a4d00

Branch: refs/heads/jstorm-runner
Commit: 588a4d00e5394cb636a6a7d7c266775ad167909a
Parents: 946778c dc84626
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Mon Apr 17 18:07:56 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 18:07:56 2017 +0200

----------------------------------------------------------------------
 .../org/apache/beam/sdk/io/jdbc/JdbcIO.java     | 40 +++++++++++---------
 .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java | 10 ++---
 2 files changed, 27 insertions(+), 23 deletions(-)
----------------------------------------------------------------------



[17/50] [abbrv] beam git commit: Fix javadoc warnings

Posted by ke...@apache.org.
Fix javadoc warnings


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/7cf06f59
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/7cf06f59
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/7cf06f59

Branch: refs/heads/jstorm-runner
Commit: 7cf06f591f8e43a5d327e8aa2998c040677d169e
Parents: 9b8f230
Author: Ismaël Mejía <ie...@apache.org>
Authored: Mon Apr 17 01:06:56 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 11:06:07 2017 +0200

----------------------------------------------------------------------
 .../org/apache/beam/runners/apex/ApexYarnLauncher.java |  2 --
 .../apache/beam/runners/direct/ModelEnforcement.java   | 13 ++++++-------
 .../beam/runners/direct/TransformEvaluatorFactory.java | 10 +++++-----
 .../src/main/java/org/apache/beam/sdk/io/AvroIO.java   | 13 ++++++-------
 .../src/main/java/org/apache/beam/sdk/io/Sink.java     |  2 +-
 .../main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java  |  2 --
 .../apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java    |  7 +------
 7 files changed, 19 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexYarnLauncher.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexYarnLauncher.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexYarnLauncher.java
index 198b9bf..b84144c 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexYarnLauncher.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexYarnLauncher.java
@@ -296,8 +296,6 @@ public class ApexYarnLauncher {
 
   /**
    * Transfer the properties to the configuration object.
-   * @param conf
-   * @param props
    */
   public static void addProperties(Configuration conf, Properties props) {
     for (final String propertyName : props.stringPropertyNames()) {
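
The diff truncates the loop body. For context, a hedged sketch of what addProperties presumably does, assuming a Hadoop-style Configuration with a String setter:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;

    public static void addProperties(Configuration conf, Properties props) {
      for (final String propertyName : props.stringPropertyNames()) {
        // Copy each entry through; this body is an assumption, the diff elides it.
        conf.set(propertyName, props.getProperty(propertyName));
      }
    }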

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ModelEnforcement.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ModelEnforcement.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ModelEnforcement.java
index 25226f7..96dbc2b 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ModelEnforcement.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ModelEnforcement.java
@@ -17,7 +17,6 @@
  */
 package org.apache.beam.runners.direct;
 
-import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -28,9 +27,9 @@ import org.apache.beam.sdk.values.PCollection;
  *
  * <p>ModelEnforcement is performed on a per-element and per-bundle basis. The
  * {@link ModelEnforcement} is provided with the input bundle as part of
- * {@link ModelEnforcementFactory#forBundle(CommittedBundle, AppliedPTransform)}, each element
- * before and after that element is provided to an underlying {@link TransformEvaluator}, and the
- * output {@link TransformResult} and committed output bundles after the
+ * {@link ModelEnforcementFactory#forBundle(DirectRunner.CommittedBundle, AppliedPTransform)}, each
+ * element before and after that element is provided to an underlying {@link TransformEvaluator},
+ * and the output {@link TransformResult} and committed output bundles after the
  * {@link TransformEvaluator} has completed.
  *
  * <p>Typically, {@link ModelEnforcement} will obtain required metadata (such as the {@link Coder}
@@ -54,10 +53,10 @@ public interface ModelEnforcement<T> {
   /**
    * Called after a bundle has been completed and {@link TransformEvaluator#finishBundle()} has been
    * called, producing the provided {@link TransformResult} and
-   * {@link CommittedBundle output bundles}.
+   * {@link DirectRunner.CommittedBundle output bundles}.
    */
   void afterFinish(
-      CommittedBundle<T> input,
+      DirectRunner.CommittedBundle<T> input,
       TransformResult<T> result,
-      Iterable<? extends CommittedBundle<?>> outputs);
+      Iterable<? extends DirectRunner.CommittedBundle<?>> outputs);
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
index efbe137..c7bc46f 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java
@@ -18,7 +18,6 @@
 package org.apache.beam.runners.direct;
 
 import javax.annotation.Nullable;
-import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
@@ -48,13 +47,14 @@ public interface TransformEvaluatorFactory {
    */
   @Nullable
   <InputT> TransformEvaluator<InputT> forApplication(
-      AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle)
+      AppliedPTransform<?, ?, ?> application, DirectRunner.CommittedBundle<?> inputBundle)
       throws Exception;
 
   /**
-   * Cleans up any state maintained by this {@link TransformEvaluatorFactory}. Called after a {@link
-   * Pipeline} is shut down. No more calls to {@link #forApplication(AppliedPTransform,
-   * CommittedBundle)} will be made after a call to {@link #cleanup()}.
+   * Cleans up any state maintained by this {@link TransformEvaluatorFactory}. Called after a
+   * {@link Pipeline} is shut down. No more calls to
+   * {@link #forApplication(AppliedPTransform, DirectRunner.CommittedBundle)} will be made after
+   * a call to {@link #cleanup()}.
    */
   void cleanup() throws Exception;
 }
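
A hedged skeleton of a factory honoring the documented lifecycle (evaluator construction is elided; only the forApplication/cleanup ordering contract is illustrated, and the direct-runner types are assumed visible as in the same package):

    import java.util.concurrent.atomic.AtomicBoolean;
    import javax.annotation.Nullable;
    import org.apache.beam.sdk.transforms.AppliedPTransform;

    class SkeletonEvaluatorFactory implements TransformEvaluatorFactory {
      private final AtomicBoolean cleanedUp = new AtomicBoolean();

      @Override
      @Nullable
      public <InputT> TransformEvaluator<InputT> forApplication(
          AppliedPTransform<?, ?, ?> application, DirectRunner.CommittedBundle<?> inputBundle) {
        if (cleanedUp.get()) {
          throw new IllegalStateException("forApplication called after cleanup()");
        }
        return null; // a real factory constructs (or caches) an evaluator here
      }

      @Override
      public void cleanup() {
        cleanedUp.set(true); // release any cached evaluators or other state
      }
    }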

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
index a41c9f5..33fe323 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java
@@ -40,7 +40,6 @@ import org.apache.avro.reflect.ReflectData;
 import org.apache.beam.sdk.coders.AvroCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.VoidCoder;
-import org.apache.beam.sdk.io.FileBasedSink.FilenamePolicy;
 import org.apache.beam.sdk.io.Read.Bounded;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PipelineRunner;
@@ -91,8 +90,8 @@ import org.apache.beam.sdk.values.PDone;
  * the path of the file to write to (e.g., a local filename or sharded
  * filename pattern if running locally, or a Google Cloud Storage
  * filename or sharded filename pattern of the form
- * {@code "gs://<bucket>/<filepath>"}). {@link AvroIO.Write#to(FilenamePolicy)} can also be used
- * to specify a custom file naming policy.
+ * {@code "gs://<bucket>/<filepath>"}). {@link AvroIO.Write#to(FileBasedSink.FilenamePolicy)}
+ * can also be used to specify a custom file naming policy.
  *
  * <p>By default, all input is put into the global window before writing. If per-window writes are
  * desired - for example, when using a streaming runner -
@@ -384,7 +383,7 @@ public class AvroIO {
      * Returns a {@link PTransform} that writes to the file(s) specified by the provided
      * {@link FileBasedSink.FilenamePolicy}.
      */
-    public static Bound<GenericRecord> to(FilenamePolicy filenamePolicy) {
+    public static Bound<GenericRecord> to(FileBasedSink.FilenamePolicy filenamePolicy) {
       return new Bound<>(GenericRecord.class).to(filenamePolicy);
     }
 
@@ -517,7 +516,7 @@ public class AvroIO {
       /** An option to indicate if output validation is desired. Default is true. */
       final boolean validate;
       final boolean windowedWrites;
-      FilenamePolicy filenamePolicy;
+      FileBasedSink.FilenamePolicy filenamePolicy;
 
       /**
        * The codec used to encode the blocks in the Avro file. String value drawn from those in
@@ -555,7 +554,7 @@ public class AvroIO {
           SerializableAvroCodecFactory codec,
           Map<String, Object> metadata,
           boolean windowedWrites,
-          FilenamePolicy filenamePolicy) {
+          FileBasedSink.FilenamePolicy filenamePolicy) {
         super(name);
         this.filenamePrefix = filenamePrefix;
         this.filenameSuffix = filenameSuffix;
@@ -607,7 +606,7 @@ public class AvroIO {
             filenamePolicy);
       }
 
-      public Bound<T> to(FilenamePolicy filenamePolicy) {
+      public Bound<T> to(FileBasedSink.FilenamePolicy filenamePolicy) {
         return new Bound<>(
             name,
             filenamePrefix,

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java
index d53c6ce..ba1afbb 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java
@@ -249,7 +249,7 @@ public abstract class Sink<T> implements Serializable, HasDisplayData {
      * not interfere with the output of other Writers, as a bundle may be executed many times for
      * fault tolerance. See {@link Sink} for more information about bundle ids.
      *
-     * <p></p>The window and paneInfo arguments are populated when windowed writes are requested.
+     * <p>The window and paneInfo arguments are populated when windowed writes are requested.
     * shard and numShards are populated for the case of static sharding. In cases where the
      * runner is dynamically picking sharding, shard and numShards might both be set to -1.
      */

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
index 8fdbeb0..05a30a4 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
@@ -227,8 +227,6 @@ public class JdbcIO {
      *
      * <p>NOTE - The "user" and "password" properties can be add via {@link #withUsername(String)},
      * {@link #withPassword(String)}, so they do not need to be included here.
-     * @param connectionProperties
-     * @return
      */
     public DataSourceConfiguration withConnectionProperties(String connectionProperties) {
       checkArgument(connectionProperties != null, "DataSourceConfiguration.create(driver, url)"
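
A hedged usage sketch of the configuration being documented here (driver, URL, credentials, and property string are placeholders; create(driver, url), withUsername, withPassword, and withConnectionProperties come from the surrounding code):

    // "user" and "password" go through the dedicated setters, so only
    // driver-specific settings belong in withConnectionProperties(...).
    JdbcIO.DataSourceConfiguration config =
        JdbcIO.DataSourceConfiguration.create(
                "org.postgresql.Driver", "jdbc:postgresql://localhost:5432/mydb")
            .withUsername("beam")
            .withPassword("secret")
            .withConnectionProperties("connectTimeout=10;socketTimeout=30");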

http://git-wip-us.apache.org/repos/asf/beam/blob/7cf06f59/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
index 919fda3..e193d29 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
@@ -130,14 +130,11 @@ public class MongoDbGridFSIO {
     /**
      * Output the object.  The default timestamp will be the GridFSDBFile
      * creation timestamp.
-     * @param output
      */
     void output(T output);
 
     /**
      * Output the object using the specified timestamp.
-     * @param output
-     * @param timestamp
      */
     void output(T output, Instant timestamp);
   }
@@ -145,7 +142,6 @@ public class MongoDbGridFSIO {
   /**
    * Interface for the parser that is used to parse the GridFSDBFile into
    * the appropriate types.
-   * @param <T>
    */
   public interface Parser<T> extends Serializable {
     void parse(GridFSDBFile input, ParserCallback<T> callback) throws IOException;
@@ -533,7 +529,6 @@ public class MongoDbGridFSIO {
 
   /**
   * Function that is called to write the data to the given GridFS OutputStream.
-   * @param <T>
    */
   public interface WriteFn<T> extends Serializable {
     /**
@@ -624,6 +619,7 @@ public class MongoDbGridFSIO {
       return PDone.in(input.getPipeline());
     }
   }
+
   private static class GridFsWriteFn<T> extends DoFn<T, Void> {
 
     private final Write<T> spec;
@@ -686,6 +682,5 @@ public class MongoDbGridFSIO {
         }
       }
     }
-
   }
 }
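
As a hedged illustration of the two callback overloads documented above, a Parser<String> that emits one element per line of the GridFS file, timestamped with the file's upload date:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import com.mongodb.gridfs.GridFSDBFile;
    import org.joda.time.Instant;

    class LineParser implements MongoDbGridFSIO.Parser<String> {
      @Override
      public void parse(GridFSDBFile input, MongoDbGridFSIO.ParserCallback<String> callback)
          throws IOException {
        // Upload date stands in for an event time; getUploadDate() is from the Mongo driver.
        Instant uploadTime = new Instant(input.getUploadDate().getTime());
        try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(input.getInputStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = reader.readLine()) != null) {
            callback.output(line, uploadTime); // two-arg overload: explicit timestamp
          }
        }
      }
    }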


[15/50] [abbrv] beam git commit: Add javadoc to getCheckpointMark in UnboundedSource

Posted by ke...@apache.org.
Add javadoc to getCheckpointMark in UnboundedSource


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/c62d698f
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/c62d698f
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/c62d698f

Branch: refs/heads/jstorm-runner
Commit: c62d698f52cab716f226ca4aec40718bf851a14c
Parents: f7d727c
Author: wtanaka.com <wt...@yahoo.com>
Authored: Sun Apr 2 23:02:00 2017 -1000
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 09:49:49 2017 +0200

----------------------------------------------------------------------
 .../java/org/apache/beam/sdk/io/UnboundedSource.java   | 13 +++++++++++++
 1 file changed, 13 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/c62d698f/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java
index 3f1ba0e..cc1f598 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java
@@ -220,6 +220,19 @@ public abstract class UnboundedSource<
     /**
      * Returns a {@link CheckpointMark} representing the progress of this {@code UnboundedReader}.
      *
+     * <p>If this {@code UnboundedReader} does not support checkpoints, it may return a
+     * CheckpointMark which does nothing, like:
+     *
+     * <pre>{@code
+     * public UnboundedSource.CheckpointMark getCheckpointMark() {
+     *   return new UnboundedSource.CheckpointMark() {
+     *     public void finalizeCheckpoint() throws IOException {
+     *       // nothing to do
+     *     }
+     *   };
+     * }
+     * }</pre>
+     *
      * <p>All elements read up until this method is called will be processed together as a bundle.
      * (An element is considered 'read' if it could be returned by a call to {@link #getCurrent}.)
      * Once the result of processing those elements and the returned checkpoint have been durably
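
For contrast with the no-op example in the new javadoc, a hedged sketch of a checkpoint that does real work on finalize; AckClient and its ack(...) method are hypothetical stand-ins for a transport handle:

    import java.io.IOException;
    import java.util.List;
    import org.apache.beam.sdk.io.UnboundedSource;

    class AckingCheckpointMark implements UnboundedSource.CheckpointMark {
      private final List<String> messageIds;
      private final AckClient client; // hypothetical transport handle

      AckingCheckpointMark(List<String> messageIds, AckClient client) {
        this.messageIds = messageIds;
        this.client = client;
      }

      @Override
      public void finalizeCheckpoint() throws IOException {
        // The runner has durably committed everything read before this mark was
        // taken, so the source may now acknowledge (and stop redelivering) these.
        client.ack(messageIds);
      }
    }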


[36/50] [abbrv] beam git commit: This closes #2557

Posted by ke...@apache.org.
This closes #2557


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/9b0cc984
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/9b0cc984
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/9b0cc984

Branch: refs/heads/jstorm-runner
Commit: 9b0cc9847d8f0119b33961ec4768cf83dcd2b5fd
Parents: a25c7d3 b9e6577
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 18:20:07 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 18:20:07 2017 -0700

----------------------------------------------------------------------
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 59 +++++++-------------
 .../io/gcp/bigquery/BigQueryTableSource.java    | 30 +++++++++-
 2 files changed, 50 insertions(+), 39 deletions(-)
----------------------------------------------------------------------



[48/50] [abbrv] beam git commit: This closes #2574

Posted by ke...@apache.org.
This closes #2574


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/686b774c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/686b774c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/686b774c

Branch: refs/heads/jstorm-runner
Commit: 686b774ceda8bee32032cb421651e8350ca5bf3d
Parents: fac4f3e d8213fa
Author: Ismaël Mejía <ie...@apache.org>
Authored: Tue Apr 18 17:28:54 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 17:28:54 2017 +0200

----------------------------------------------------------------------
 .../examples/streaming/KafkaIOExamples.java     | 338 -------------------
 .../KafkaWindowedWordCountExample.java          | 164 ---------
 .../FlinkStreamingTransformTranslators.java     |  87 +----
 .../flink/translation/types/FlinkCoder.java     |  63 ----
 .../streaming/io/UnboundedFlinkSink.java        | 200 -----------
 .../streaming/io/UnboundedFlinkSource.java      | 120 -------
 6 files changed, 12 insertions(+), 960 deletions(-)
----------------------------------------------------------------------



[39/50] [abbrv] beam git commit: [BEAM-1991] Sum.SumDoubleFn => Sum.ofDoubles

Posted by ke...@apache.org.
[BEAM-1991] Sum.SumDoubleFn => Sum.ofDoubles

This is left over from commit 78a360eac35507d9a558fc6117bb56b67b8a884e



Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/7c858a8a
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/7c858a8a
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/7c858a8a

Branch: refs/heads/jstorm-runner
Commit: 7c858a8aa7aa3ff27e9d38f9ff952285227d199e
Parents: 36e4355
Author: wtanaka.com <wt...@yahoo.com>
Authored: Mon Apr 17 16:45:45 2017 -1000
Committer: Dan Halperin <dh...@google.com>
Committed: Mon Apr 17 21:02:33 2017 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/Combine.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/7c858a8a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
index 58d65d0..1de6d8c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java
@@ -1775,7 +1775,7 @@ public class Combine {
    * PCollection<KV<String, Double>> salesRecords = ...;
    * PCollection<KV<String, Double>> totalSalesPerPerson =
    *     salesRecords.apply(Combine.<String, Double, Double>perKey(
-   *         new Sum.SumDoubleFn()));
+   *         Sum.ofDoubles()));
    * } </pre>
    *
    * <p>Each output element is in the window by which its corresponding input
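
A hedged, slightly fuller version of the corrected snippet, given a Pipeline named pipeline (the input values are made up; only Sum.ofDoubles() and Combine.perKey come from the javadoc):

    import org.apache.beam.sdk.transforms.Combine;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.transforms.Sum;
    import org.apache.beam.sdk.values.KV;
    import org.apache.beam.sdk.values.PCollection;

    PCollection<KV<String, Double>> salesRecords =
        pipeline.apply(Create.of(
            KV.of("ann", 10.0), KV.of("ann", 2.5), KV.of("bob", 7.0)));
    PCollection<KV<String, Double>> totalSalesPerPerson =
        salesRecords.apply(Combine.<String, Double, Double>perKey(Sum.ofDoubles()));
    // Yields ("ann", 12.5) and ("bob", 7.0), each in its input's window.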


[27/50] [abbrv] beam git commit: This closes #2426

Posted by ke...@apache.org.
This closes #2426


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/32a576ab
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/32a576ab
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/32a576ab

Branch: refs/heads/jstorm-runner
Commit: 32a576ab6e04a86d2205dfd766205521753a6d60
Parents: 075b621 79b066d
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 13:09:39 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 13:09:39 2017 -0700

----------------------------------------------------------------------
 .../java/org/apache/beam/runners/core/OldDoFn.java     |  3 ++-
 .../beam/runners/spark/util/SparkSideInputReader.java  |  3 +--
 .../apache/beam/sdk/transforms/windowing/WindowFn.java | 13 -------------
 .../org/apache/beam/sdk/testing/StaticWindowsTest.java | 10 +++++++---
 4 files changed, 10 insertions(+), 19 deletions(-)
----------------------------------------------------------------------



[21/50] [abbrv] beam git commit: Update Dataflow Worker Version

Posted by ke...@apache.org.
Update Dataflow Worker Version


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/faece41c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/faece41c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/faece41c

Branch: refs/heads/jstorm-runner
Commit: faece41cea340c4148f2b5717fc6ecd0d1eb50ab
Parents: 588a4d0
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 08:47:28 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 09:20:46 2017 -0700

----------------------------------------------------------------------
 runners/google-cloud-dataflow-java/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/faece41c/runners/google-cloud-dataflow-java/pom.xml
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/pom.xml b/runners/google-cloud-dataflow-java/pom.xml
index 7b0bb88..ff63a31 100644
--- a/runners/google-cloud-dataflow-java/pom.xml
+++ b/runners/google-cloud-dataflow-java/pom.xml
@@ -33,7 +33,7 @@
   <packaging>jar</packaging>
 
   <properties>
-    <dataflow.container_version>beam-master-20170413</dataflow.container_version>
+    <dataflow.container_version>beam-master-20170417</dataflow.container_version>
     <dataflow.fnapi_environment_major_version>1</dataflow.fnapi_environment_major_version>
     <dataflow.legacy_environment_major_version>6</dataflow.legacy_environment_major_version>
   </properties>


[03/50] [abbrv] beam git commit: This closes #2512

Posted by ke...@apache.org.
This closes #2512


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f30d5b9e
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f30d5b9e
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f30d5b9e

Branch: refs/heads/jstorm-runner
Commit: f30d5b9e9db80e35c9fae4c05d2a76655922db1c
Parents: 89ff0b1 113471d
Author: Thomas Groh <tg...@google.com>
Authored: Fri Apr 14 14:40:57 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Apr 14 14:40:57 2017 -0700

----------------------------------------------------------------------
 .../apex/translation/ParDoTranslator.java       |   6 +-
 .../operators/ApexGroupByKeyOperator.java       |  19 +-
 .../operators/ApexParDoOperator.java            |  48 +--
 .../apex/translation/utils/NoOpStepContext.java |   2 +-
 .../apex/translation/ParDoTranslatorTest.java   |  18 +-
 .../beam/runners/core/BaseExecutionContext.java |  13 +-
 .../apache/beam/runners/core/DoFnAdapters.java  |  16 +-
 .../apache/beam/runners/core/DoFnRunners.java   |   8 +-
 .../beam/runners/core/ExecutionContext.java     |  13 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |   6 +-
 .../org/apache/beam/runners/core/OldDoFn.java   |  38 +--
 ...eBoundedSplittableProcessElementInvoker.java |   8 +-
 .../beam/runners/core/OutputWindowedValue.java  |  10 +-
 .../beam/runners/core/SimpleDoFnRunner.java     |  54 ++--
 .../beam/runners/core/SimpleOldDoFnRunner.java  |  63 ++--
 .../beam/runners/core/SplittableParDo.java      |  21 +-
 .../beam/runners/core/WindowingInternals.java   |   8 +-
 .../core/WindowingInternalsAdapters.java        |   8 +-
 .../core/GroupAlsoByWindowsProperties.java      |  10 +-
 .../apache/beam/runners/core/NoOpOldDoFn.java   |   4 +-
 .../apache/beam/runners/core/OldDoFnTest.java   |   4 +-
 ...ndedSplittableProcessElementInvokerTest.java |   6 +-
 .../beam/runners/core/ReduceFnTester.java       |   8 +-
 .../runners/core/SimpleOldDoFnRunnerTest.java   |   4 +-
 .../beam/runners/core/SplittableParDoTest.java  |   8 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   8 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   4 +-
 .../runners/direct/ParDoEvaluatorFactory.java   |  10 +-
 .../direct/ParDoMultiOverrideFactory.java       |   2 +-
 ...littableProcessElementsEvaluatorFactory.java |   8 +-
 .../direct/StatefulParDoEvaluatorFactory.java   |   2 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   6 +-
 .../FlinkStreamingTransformTranslators.java     |  20 +-
 .../functions/FlinkDoFnFunction.java            |   4 +-
 .../functions/FlinkNoOpStepContext.java         |   2 +-
 .../functions/FlinkStatefulDoFnFunction.java    |   4 +-
 .../wrappers/streaming/DoFnOperator.java        |  14 +-
 .../streaming/SplittableDoFnOperator.java       |  10 +-
 .../wrappers/streaming/WindowDoFnOperator.java  |   4 +-
 .../flink/streaming/DoFnOperatorTest.java       |  34 +--
 .../dataflow/BatchStatefulParDoOverrides.java   |   2 +-
 .../runners/dataflow/BatchViewOverrides.java    |   6 +-
 .../dataflow/BatchViewOverridesTest.java        |   4 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |  10 +-
 .../spark/translation/MultiDoFnFunction.java    |   4 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   8 +-
 .../spark/translation/SparkProcessContext.java  |   2 +-
 .../streaming/StreamingTransformTranslator.java |   2 +-
 .../org/apache/beam/sdk/transforms/Combine.java |   2 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |  14 +-
 .../apache/beam/sdk/transforms/DoFnTester.java  |  41 ++-
 .../org/apache/beam/sdk/transforms/ParDo.java   |  77 +++--
 .../apache/beam/sdk/transforms/Partition.java   |   2 +-
 .../beam/sdk/values/PCollectionTuple.java       |   3 +-
 .../org/apache/beam/sdk/values/TupleTag.java    |  26 +-
 .../apache/beam/sdk/values/TupleTagList.java    |   2 +-
 .../org/apache/beam/sdk/values/TypedPValue.java |   4 +-
 .../apache/beam/sdk/metrics/MetricsTest.java    |   2 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   | 293 ++++++++++---------
 .../beam/sdk/transforms/SplittableDoFnTest.java |  21 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |   8 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |  46 +--
 .../beam/fn/harness/fake/FakeStepContext.java   |   2 +-
 .../control/ProcessBundleHandlerTest.java       |  30 +-
 .../sdk/io/gcp/bigquery/WritePartition.java     |   6 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |   4 +-
 66 files changed, 578 insertions(+), 578 deletions(-)
----------------------------------------------------------------------



[49/50] [abbrv] beam git commit: Merge branch 'master' upto commit 686b774ceda8bee32032cb421651e8350ca5bf3d into jstorm-runner

Posted by ke...@apache.org.
Merge branch 'master' upto commit 686b774ceda8bee32032cb421651e8350ca5bf3d into jstorm-runner


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f1e170a5
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f1e170a5
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f1e170a5

Branch: refs/heads/jstorm-runner
Commit: f1e170a5fa9dc4d462af42f9f382afd0ecd798b6
Parents: f6a89b0 686b774
Author: Pei He <he...@alibaba-inc.com>
Authored: Tue Apr 25 17:37:52 2017 +0800
Committer: Pei He <he...@alibaba-inc.com>
Committed: Tue Apr 25 18:19:41 2017 +0800

----------------------------------------------------------------------
 .gitignore                                      |    3 +
 .jenkins/common_job_properties.groovy           |  261 ----
 .../job_beam_PerformanceTests_Dataflow.groovy   |   43 -
 .jenkins/job_beam_PerformanceTests_JDBC.groovy  |   60 -
 .jenkins/job_beam_PerformanceTests_Spark.groovy |   44 -
 ...job_beam_PostCommit_Java_MavenInstall.groovy |   42 -
 ..._PostCommit_Java_ValidatesRunner_Apex.groovy |   48 -
 ...tCommit_Java_ValidatesRunner_Dataflow.groovy |   45 -
 ...PostCommit_Java_ValidatesRunner_Flink.groovy |   43 -
 ...tCommit_Java_ValidatesRunner_Gearpump.groovy |   49 -
 ...PostCommit_Java_ValidatesRunner_Spark.groovy |   44 -
 .../job_beam_PostCommit_Python_Verify.groovy    |   55 -
 .../job_beam_PreCommit_Java_MavenInstall.groovy |   42 -
 .../job_beam_PreCommit_Website_Stage.groovy     |   80 -
 .jenkins/job_beam_PreCommit_Website_Test.groovy |   65 -
 .../job_beam_Release_NightlySnapshot.groovy     |   45 -
 .jenkins/job_seed.groovy                        |   53 -
 .../jenkins/common_job_properties.groovy        |  261 ++++
 .../job_beam_PerformanceTests_Dataflow.groovy   |   43 +
 .../job_beam_PerformanceTests_JDBC.groovy       |   60 +
 .../job_beam_PerformanceTests_Spark.groovy      |   44 +
 ...job_beam_PostCommit_Java_MavenInstall.groovy |   42 +
 ..._PostCommit_Java_ValidatesRunner_Apex.groovy |   48 +
 ...tCommit_Java_ValidatesRunner_Dataflow.groovy |   45 +
 ...PostCommit_Java_ValidatesRunner_Flink.groovy |   43 +
 ...tCommit_Java_ValidatesRunner_Gearpump.groovy |   49 +
 ...PostCommit_Java_ValidatesRunner_Spark.groovy |   44 +
 .../job_beam_PostCommit_Python_Verify.groovy    |   55 +
 .../job_beam_PreCommit_Java_MavenInstall.groovy |   42 +
 .../job_beam_PreCommit_Website_Stage.groovy     |   80 +
 .../job_beam_PreCommit_Website_Test.groovy      |   65 +
 .../job_beam_Release_NightlySnapshot.groovy     |   45 +
 .test-infra/jenkins/job_seed.groovy             |   53 +
 .../cassandra-service-for-local-dev.yaml        |   28 +
 .../cassandra-svc-statefulset.yaml              |  114 ++
 .../LargeITCluster/cassandra-svc-temp.yaml      |   74 +
 .../cassandra/LargeITCluster/data-load.sh       |  122 ++
 .../cassandra/LargeITCluster/show_health.sh     |   47 +
 .../cassandra/LargeITCluster/start-up.sh        |   22 +
 .../cassandra/LargeITCluster/teardown.sh        |   25 +
 .../cassandra-service-for-local-dev.yaml        |   30 +
 .../SmallITCluster/cassandra-svc-rc.yaml        |   74 +
 .../cassandra/SmallITCluster/data-load.sh       |   86 +
 .../cassandra/SmallITCluster/show_health.sh     |   47 +
 .../cassandra/SmallITCluster/start-up.sh        |   23 +
 .../cassandra/SmallITCluster/teardown.sh        |   22 +
 .../kubernetes/cassandra/data-load-setup.sh     |   29 +
 .../elasticsearch-service-for-local-dev.yaml    |   33 +
 .../es-services-deployments.yaml                |  258 +++
 .../LargeProductionCluster/start-up.sh          |   22 +
 .../LargeProductionCluster/teardown.sh          |   21 +
 .../elasticsearch-service-for-local-dev.yaml    |   34 +
 .../SmallITCluster/elasticsearch-svc-rc.yaml    |   96 ++
 .../elasticsearch/SmallITCluster/start-up.sh    |   23 +
 .../elasticsearch/SmallITCluster/teardown.sh    |   21 +
 .../kubernetes/elasticsearch/data-load-setup.sh |   26 +
 .../kubernetes/elasticsearch/data-load.sh       |   33 +
 .../kubernetes/elasticsearch/es_test_data.py    |  299 ++++
 .../kubernetes/elasticsearch/show-health.sh     |   33 +
 .../postgres/postgres-service-for-local-dev.yml |   28 +
 .test-infra/kubernetes/postgres/postgres.yml    |   56 +
 .test-infra/travis/README.md                    |   23 +
 .test-infra/travis/settings.xml                 |   33 +
 .test-infra/travis/test_wordcount.sh            |  125 ++
 .travis.yml                                     |    4 +-
 .travis/README.md                               |   23 -
 .travis/settings.xml                            |   33 -
 .travis/test_wordcount.sh                       |  125 --
 README.md                                       |    1 +
 examples/java8/pom.xml                          |   34 +
 .../beam/examples/MinimalWordCountJava8.java    |    9 +-
 .../beam/examples/complete/game/GameStats.java  |    8 +-
 .../examples/complete/game/LeaderBoard.java     |    2 +-
 .../beam/examples/complete/game/UserScore.java  |    5 +-
 .../examples/MinimalWordCountJava8Test.java     |    9 +-
 .../complete/game/HourlyTeamScoreTest.java      |    5 +-
 .../examples/complete/game/UserScoreTest.java   |    6 +-
 .../apache/beam/runners/apex/ApexRunner.java    |   34 +-
 .../beam/runners/apex/ApexYarnLauncher.java     |  111 +-
 .../translation/ApexPipelineTranslator.java     |    2 +-
 .../FlattenPCollectionTranslator.java           |   13 +-
 .../apex/translation/GroupByKeyTranslator.java  |    4 +-
 .../apex/translation/ParDoTranslator.java       |   30 +-
 .../apex/translation/TranslationContext.java    |   22 +-
 .../translation/WindowAssignTranslator.java     |   58 +-
 .../operators/ApexGroupByKeyOperator.java       |  273 +---
 .../operators/ApexParDoOperator.java            |  235 ++-
 .../operators/ApexProcessFnOperator.java        |  184 +++
 .../translation/utils/ApexStateInternals.java   |   73 +-
 .../apex/translation/utils/NoOpStepContext.java |    2 +-
 .../utils/SerializablePipelineOptions.java      |   13 +-
 .../translation/utils/StateInternalsProxy.java  |   67 +
 .../translation/ApexGroupByKeyOperatorTest.java |    2 +-
 .../apex/translation/ParDoTranslatorTest.java   |   20 +-
 .../utils/ApexStateInternalsTest.java           |   25 +-
 runners/core-construction-java/pom.xml          |   36 +
 .../beam/runners/core/construction/Coders.java  |  174 +++
 .../DeduplicatedFlattenFactory.java             |   79 +-
 .../EmptyFlattenAsCreateFactory.java            |   25 +-
 .../runners/core/construction/PCollections.java |   97 ++
 .../core/construction/PTransformMatchers.java   |    7 +-
 .../construction/PTransformReplacements.java    |   69 +
 .../core/construction/PrimitiveCreate.java      |   18 +-
 .../core/construction/ReplacementOutputs.java   |   63 +-
 .../core/construction/SdkComponents.java        |  159 ++
 .../SingleInputOutputOverrideFactory.java       |   14 +-
 .../runners/core/construction/Triggers.java     |  336 ++++
 .../UnboundedReadFromBoundedSource.java         |  542 +++++++
 .../UnsupportedOverrideFactory.java             |   20 +-
 .../core/construction/WindowingStrategies.java  |  245 +++
 .../runners/core/construction/CodersTest.java   |  163 ++
 .../DeduplicatedFlattenFactoryTest.java         |   24 +-
 .../EmptyFlattenAsCreateFactoryTest.java        |   42 +-
 .../core/construction/PCollectionsTest.java     |  188 +++
 .../construction/PTransformMatchersTest.java    |  132 +-
 .../PTransformReplacementsTest.java             |  131 ++
 .../construction/ReplacementOutputsTest.java    |  109 +-
 .../core/construction/SdkComponentsTest.java    |  157 ++
 .../SingleInputOutputOverrideFactoryTest.java   |   37 +-
 .../runners/core/construction/TriggersTest.java |  111 ++
 .../UnboundedReadFromBoundedSourceTest.java     |  373 +++++
 .../UnsupportedOverrideFactoryTest.java         |   16 +-
 .../construction/WindowingStrategiesTest.java   |  110 ++
 runners/core-java/pom.xml                       |   10 +-
 .../beam/runners/core/AssignWindowsDoFn.java    |   78 -
 .../beam/runners/core/BaseExecutionContext.java |   13 +-
 .../apache/beam/runners/core/DoFnAdapters.java  |  323 ----
 .../apache/beam/runners/core/DoFnRunners.java   |   10 +-
 .../beam/runners/core/ExecutionContext.java     |   13 +-
 .../GroupAlsoByWindowViaOutputBufferDoFn.java   |   19 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |    9 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |   19 +-
 .../core/GroupAlsoByWindowsAggregators.java     |   28 +
 .../runners/core/GroupAlsoByWindowsDoFn.java    |   46 -
 .../core/LateDataDroppingDoFnRunner.java        |    3 +-
 .../org/apache/beam/runners/core/OldDoFn.java   |   41 +-
 ...eBoundedSplittableProcessElementInvoker.java |  133 +-
 .../beam/runners/core/OutputWindowedValue.java  |   10 +-
 .../beam/runners/core/SimpleDoFnRunner.java     |   59 +-
 .../beam/runners/core/SimpleOldDoFnRunner.java  |   63 +-
 .../beam/runners/core/SplittableParDo.java      |   36 +-
 .../core/SplittableProcessElementInvoker.java   |   22 +-
 .../core/UnboundedReadFromBoundedSource.java    |  542 -------
 .../beam/runners/core/WindowingInternals.java   |    8 +-
 .../core/WindowingInternalsAdapters.java        |    8 +-
 .../triggers/AfterWatermarkStateMachine.java    |   14 +-
 ...roupAlsoByWindowViaOutputBufferDoFnTest.java |    4 +-
 .../core/GroupAlsoByWindowsProperties.java      |   37 +-
 .../apache/beam/runners/core/NoOpOldDoFn.java   |    4 +-
 .../apache/beam/runners/core/OldDoFnTest.java   |    4 +-
 ...ndedSplittableProcessElementInvokerTest.java |   27 +-
 .../beam/runners/core/ReduceFnTester.java       |   13 +-
 .../runners/core/SimpleOldDoFnRunnerTest.java   |    4 +-
 .../beam/runners/core/SplittableParDoTest.java  |  238 +--
 .../UnboundedReadFromBoundedSourceTest.java     |  373 -----
 .../direct/BoundedReadEvaluatorFactory.java     |    2 +-
 ...ectGBKIntoKeyedWorkItemsOverrideFactory.java |   16 +-
 .../beam/runners/direct/DirectGraphVisitor.java |    5 +-
 .../direct/DirectGroupByKeyOverrideFactory.java |   14 +-
 .../direct/ExecutorServiceParallelExecutor.java |    4 +-
 .../runners/direct/FlattenEvaluatorFactory.java |    2 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   18 +-
 .../direct/GroupByKeyOnlyEvaluatorFactory.java  |    4 +-
 .../direct/KeyedPValueTrackingVisitor.java      |   14 +-
 .../beam/runners/direct/ModelEnforcement.java   |   13 +-
 .../beam/runners/direct/ParDoEvaluator.java     |    4 +-
 .../runners/direct/ParDoEvaluatorFactory.java   |   20 +-
 .../direct/ParDoMultiOverrideFactory.java       |   29 +-
 ...littableProcessElementsEvaluatorFactory.java |    8 +-
 .../direct/StatefulParDoEvaluatorFactory.java   |   10 +-
 .../direct/TestStreamEvaluatorFactory.java      |   20 +-
 .../direct/TransformEvaluatorFactory.java       |   10 +-
 .../direct/TransformExecutorServices.java       |   37 +-
 .../direct/UnboundedReadEvaluatorFactory.java   |    4 +-
 .../runners/direct/ViewEvaluatorFactory.java    |    4 +-
 .../runners/direct/ViewOverrideFactory.java     |   23 +-
 .../beam/runners/direct/WatermarkManager.java   |   19 +-
 .../runners/direct/WindowEvaluatorFactory.java  |    2 +-
 .../direct/WriteWithShardingFactory.java        |   22 +-
 .../runners/direct/DirectGraphVisitorTest.java  |    7 +-
 .../DirectGroupByKeyOverrideFactoryTest.java    |   12 +-
 .../beam/runners/direct/DirectRunnerTest.java   |    5 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |    8 +-
 .../direct/ParDoMultiOverrideFactoryTest.java   |   45 -
 .../StatefulParDoEvaluatorFactoryTest.java      |    2 +-
 .../direct/TestStreamEvaluatorFactoryTest.java  |   11 -
 .../direct/TransformExecutorServicesTest.java   |   48 +
 .../runners/direct/ViewOverrideFactoryTest.java |   44 +-
 .../direct/WriteWithShardingFactoryTest.java    |   23 +-
 .../examples/streaming/KafkaIOExamples.java     |  338 ----
 .../KafkaWindowedWordCountExample.java          |  164 --
 runners/flink/pom.xml                           |   45 +-
 runners/flink/runner/pom.xml                    |  109 +-
 .../flink/FlinkBatchTransformTranslators.java   |   36 +-
 .../flink/FlinkBatchTranslationContext.java     |   11 +-
 .../flink/FlinkDetachedRunnerResult.java        |    3 +-
 .../flink/FlinkStreamingPipelineTranslator.java |   61 +-
 .../FlinkStreamingTransformTranslators.java     |  139 +-
 .../flink/FlinkStreamingTranslationContext.java |   12 +-
 .../functions/FlinkDoFnFunction.java            |    4 +-
 .../functions/FlinkNoOpStepContext.java         |    2 +-
 .../functions/FlinkStatefulDoFnFunction.java    |    4 +-
 .../types/EncodedValueTypeInformation.java      |    9 -
 .../flink/translation/types/FlinkCoder.java     |   63 -
 .../utils/SerializedPipelineOptions.java        |    2 +
 .../wrappers/streaming/DoFnOperator.java        |   14 +-
 .../streaming/SingletonKeyedWorkItem.java       |    2 -
 .../streaming/SingletonKeyedWorkItemCoder.java  |    4 +-
 .../streaming/SplittableDoFnOperator.java       |   10 +-
 .../wrappers/streaming/WindowDoFnOperator.java  |    7 +-
 .../streaming/io/UnboundedFlinkSink.java        |  200 ---
 .../streaming/io/UnboundedFlinkSource.java      |  120 --
 .../beam/runners/flink/PipelineOptionsTest.java |    2 +-
 .../flink/streaming/DoFnOperatorTest.java       |   34 +-
 runners/google-cloud-dataflow-java/pom.xml      |   25 +-
 .../dataflow/BatchStatefulParDoOverrides.java   |   51 +-
 .../runners/dataflow/BatchViewOverrides.java    |   23 +-
 .../dataflow/DataflowPipelineTranslator.java    |   37 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  146 +-
 .../DataflowUnboundedReadFromBoundedSource.java |  547 -------
 .../dataflow/PrimitiveParDoSingleFactory.java   |   15 +-
 .../dataflow/ReshuffleOverrideFactory.java      |   12 +-
 .../dataflow/StreamingViewOverrides.java        |   14 +-
 .../runners/dataflow/TransformTranslator.java   |    6 +-
 .../dataflow/testing/TestDataflowRunner.java    |   34 +-
 .../dataflow/BatchViewOverridesTest.java        |    4 +-
 .../dataflow/DataflowPipelineJobTest.java       |    7 +-
 ...aflowUnboundedReadFromBoundedSourceTest.java |   79 -
 .../PrimitiveParDoSingleFactoryTest.java        |   59 +-
 .../testing/TestDataflowRunnerTest.java         |    3 +-
 .../apache/beam/runners/spark/SparkRunner.java  |   20 +-
 .../beam/runners/spark/TestSparkRunner.java     |   21 +-
 .../beam/runners/spark/io/MicrobatchSource.java |  113 +-
 .../beam/runners/spark/io/SourceDStream.java    |   11 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |   22 +-
 .../spark/stateful/StateSpecFunctions.java      |    6 +-
 .../runners/spark/translation/DoFnFunction.java |  130 --
 .../spark/translation/EvaluationContext.java    |   11 +-
 .../spark/translation/MultiDoFnFunction.java    |    4 +-
 .../spark/translation/SparkAssignWindowFn.java  |    3 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   18 +-
 .../spark/translation/SparkProcessContext.java  |    2 +-
 .../spark/translation/SparkRuntimeContext.java  |    2 +
 .../spark/translation/TransformTranslator.java  |   93 +-
 .../streaming/StreamingTransformTranslator.java |   96 +-
 .../spark/util/SparkSideInputReader.java        |    3 +-
 .../ResumeFromCheckpointStreamingTest.java      |   14 +-
 sdks/common/fn-api/pom.xml                      |    5 -
 .../fn-api/src/main/proto/beam_fn_api.proto     |  174 +--
 .../src/main/proto/beam_runner_api.proto        |   14 +-
 sdks/java/core/pom.xml                          |   44 -
 .../main/java/org/apache/beam/sdk/Pipeline.java |   39 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |   13 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   | 1195 --------------
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |  494 ------
 .../beam/sdk/io/PubsubUnboundedSource.java      | 1463 ------------------
 .../main/java/org/apache/beam/sdk/io/Sink.java  |    2 +-
 .../org/apache/beam/sdk/io/UnboundedSource.java |   13 +
 .../beam/sdk/options/PipelineOptions.java       |   12 +-
 .../sdk/runners/PTransformOverrideFactory.java  |   31 +-
 .../apache/beam/sdk/runners/PipelineRunner.java |    7 +-
 .../beam/sdk/runners/TransformHierarchy.java    |   80 +-
 .../apache/beam/sdk/testing/TestPipeline.java   |    2 +
 .../beam/sdk/transforms/AppliedPTransform.java  |   24 +-
 .../org/apache/beam/sdk/transforms/Combine.java |    4 +-
 .../org/apache/beam/sdk/transforms/Create.java  |    7 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |   96 +-
 .../apache/beam/sdk/transforms/DoFnTester.java  |   46 +-
 .../beam/sdk/transforms/FlatMapElements.java    |  113 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |    4 +-
 .../apache/beam/sdk/transforms/MapElements.java |   99 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   |   82 +-
 .../apache/beam/sdk/transforms/Partition.java   |    2 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |    4 +
 .../transforms/join/KeyedPCollectionTuple.java  |   12 +-
 .../reflect/ByteBuddyDoFnInvokerFactory.java    |   47 +-
 .../sdk/transforms/reflect/DoFnInvoker.java     |    4 +-
 .../sdk/transforms/reflect/DoFnSignature.java   |   10 +-
 .../sdk/transforms/reflect/DoFnSignatures.java  |   96 +-
 .../splittabledofn/HasDefaultTracker.java       |   30 +
 .../transforms/splittabledofn/OffsetRange.java  |    8 +-
 .../splittabledofn/OffsetRangeTracker.java      |   33 +-
 .../splittabledofn/RestrictionTracker.java      |    8 +
 .../transforms/windowing/AfterWatermark.java    |   14 +-
 .../beam/sdk/transforms/windowing/Triggers.java |  320 ----
 .../beam/sdk/transforms/windowing/WindowFn.java |   13 -
 .../org/apache/beam/sdk/util/PubsubClient.java  |  544 -------
 .../apache/beam/sdk/util/PubsubGrpcClient.java  |  424 -----
 .../apache/beam/sdk/util/PubsubJsonClient.java  |  317 ----
 .../apache/beam/sdk/util/PubsubTestClient.java  |  436 ------
 .../org/apache/beam/sdk/util/Transport.java     |    3 +-
 .../beam/sdk/util/WindowingStrategies.java      |  267 ----
 .../java/org/apache/beam/sdk/values/PBegin.java |    6 +-
 .../apache/beam/sdk/values/PCollectionList.java |   27 +-
 .../beam/sdk/values/PCollectionTuple.java       |   13 +-
 .../java/org/apache/beam/sdk/values/PDone.java  |    6 +-
 .../java/org/apache/beam/sdk/values/PInput.java |    4 +-
 .../org/apache/beam/sdk/values/POutput.java     |    4 +-
 .../java/org/apache/beam/sdk/values/PValue.java |    4 +-
 .../org/apache/beam/sdk/values/PValueBase.java  |    6 +-
 .../apache/beam/sdk/values/TaggedPValue.java    |    5 +
 .../org/apache/beam/sdk/values/TupleTag.java    |   26 +-
 .../apache/beam/sdk/values/TupleTagList.java    |    2 +-
 .../apache/beam/sdk/values/TypeDescriptors.java |   25 +-
 .../org/apache/beam/sdk/values/TypedPValue.java |    4 +-
 .../java/org/apache/beam/sdk/PipelineTest.java  |  108 +-
 .../org/apache/beam/sdk/io/PubsubIOTest.java    |  197 ---
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |  190 ---
 .../beam/sdk/io/PubsubUnboundedSourceTest.java  |  411 -----
 .../apache/beam/sdk/metrics/MetricsTest.java    |    2 +-
 .../sdk/runners/TransformHierarchyTest.java     |   72 +-
 .../beam/sdk/testing/StaticWindowsTest.java     |   10 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |    8 +-
 .../beam/sdk/transforms/MapElementsTest.java    |   25 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   |  293 ++--
 .../beam/sdk/transforms/SplittableDoFnTest.java |   58 +-
 .../transforms/reflect/DoFnInvokersTest.java    |  153 +-
 .../DoFnSignaturesProcessElementTest.java       |    2 +-
 .../DoFnSignaturesSplittableDoFnTest.java       |  117 +-
 .../splittabledofn/OffsetRangeTrackerTest.java  |   49 +-
 .../sdk/transforms/windowing/TriggersTest.java  |  100 --
 .../apache/beam/sdk/util/PubsubClientTest.java  |  189 ---
 .../beam/sdk/util/PubsubGrpcClientTest.java     |  207 ---
 .../beam/sdk/util/PubsubJsonClientTest.java     |  140 --
 .../beam/sdk/util/PubsubTestClientTest.java     |  114 --
 .../beam/sdk/util/WindowingStrategiesTest.java  |   91 --
 .../beam/sdk/values/PCollectionListTest.java    |   70 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |   13 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   46 +-
 .../beam/fn/harness/fake/FakeStepContext.java   |    2 +-
 .../control/ProcessBundleHandlerTest.java       |   30 +-
 sdks/java/io/common/pom.xml                     |    4 +
 .../apache/beam/sdk/io/common/HashingFn.java    |  109 ++
 .../sdk/io/elasticsearch/ElasticsearchIO.java   |    2 +
 sdks/java/io/google-cloud-platform/pom.xml      |   75 +-
 .../sdk/io/gcp/bigquery/BatchLoadBigQuery.java  |    7 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |   59 +-
 .../io/gcp/bigquery/BigQueryTableSource.java    |   30 +-
 .../sdk/io/gcp/bigquery/WritePartition.java     |    6 +-
 .../beam/sdk/io/gcp/bigquery/WriteResult.java   |   11 +-
 .../beam/sdk/io/gcp/pubsub/PubsubClient.java    |  544 +++++++
 .../sdk/io/gcp/pubsub/PubsubGrpcClient.java     |  424 +++++
 .../apache/beam/sdk/io/gcp/pubsub/PubsubIO.java | 1014 ++++++++++++
 .../sdk/io/gcp/pubsub/PubsubJsonClient.java     |  319 ++++
 .../sdk/io/gcp/pubsub/PubsubTestClient.java     |  436 ++++++
 .../sdk/io/gcp/pubsub/PubsubUnboundedSink.java  |  490 ++++++
 .../io/gcp/pubsub/PubsubUnboundedSource.java    | 1463 ++++++++++++++++++
 .../beam/sdk/io/gcp/pubsub/package-info.java    |   24 +
 .../beam/sdk/io/gcp/GcpApiSurfaceTest.java      |    5 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |    4 +-
 .../sdk/io/gcp/pubsub/PubsubClientTest.java     |  189 +++
 .../sdk/io/gcp/pubsub/PubsubGrpcClientTest.java |  208 +++
 .../beam/sdk/io/gcp/pubsub/PubsubIOTest.java    |  189 +++
 .../sdk/io/gcp/pubsub/PubsubJsonClientTest.java |  139 ++
 .../sdk/io/gcp/pubsub/PubsubTestClientTest.java |  114 ++
 .../io/gcp/pubsub/PubsubUnboundedSinkTest.java  |  188 +++
 .../gcp/pubsub/PubsubUnboundedSourceTest.java   |  409 +++++
 .../hadoop/inputformat/HadoopInputFormatIO.java |   36 +-
 .../inputformat/HadoopInputFormatIOTest.java    |   99 +-
 sdks/java/io/hadoop/jdk1.8-tests/pom.xml        |   46 +-
 .../inputformat/HIFIOWithElasticTest.java       |    6 +-
 .../HIFIOWithEmbeddedCassandraTest.java         |  215 +++
 .../hadoop/inputformat/hashing/HashingFn.java   |  109 --
 .../integration/tests/HIFIOCassandraIT.java     |    8 +-
 .../integration/tests/HIFIOElasticIT.java       |    6 +-
 .../SmallITCluster/cassandra-svc-rc.yaml        |   88 --
 .../cassandra/SmallITCluster/start-up.sh        |   21 -
 .../cassandra/SmallITCluster/teardown.sh        |   21 -
 .../kubernetes/cassandra/data-load-setup.sh     |   29 -
 .../resources/kubernetes/cassandra/data-load.sh |   67 -
 .../LargeProductionCluster/es-services.yaml     |  277 ----
 .../LargeProductionCluster/start-up.sh          |   21 -
 .../LargeProductionCluster/teardown.sh          |   20 -
 .../SmallITCluster/elasticsearch-svc-rc.yaml    |   84 -
 .../elasticsearch/SmallITCluster/start-up.sh    |   22 -
 .../elasticsearch/SmallITCluster/teardown.sh    |   20 -
 .../kubernetes/elasticsearch/data-load-setup.sh |   26 -
 .../kubernetes/elasticsearch/data-load.sh       |   33 -
 .../kubernetes/elasticsearch/es_test_data.py    |  299 ----
 .../kubernetes/elasticsearch/show-health.sh     |   25 -
 sdks/java/io/hadoop/pom.xml                     |   12 +-
 .../apache/beam/sdk/io/hdfs/HDFSFileSink.java   |    2 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSource.java |    2 +
 .../org/apache/beam/sdk/io/jdbc/JdbcIO.java     |   61 +-
 .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java |   10 +-
 .../kubernetes/postgres-pod-no-vol.yml          |   32 -
 .../kubernetes/postgres-service-public.yml      |   28 -
 .../jdbc/src/test/resources/kubernetes/setup.sh |   20 -
 .../src/test/resources/kubernetes/teardown.sh   |   20 -
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |    2 +
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |    2 +
 .../apache/beam/sdk/io/kinesis/KinesisIO.java   |    2 +
 .../beam/sdk/io/mongodb/MongoDbGridFSIO.java    |    9 +-
 .../apache/beam/sdk/io/mongodb/MongoDbIO.java   |    2 +
 .../org/apache/beam/sdk/io/mqtt/MqttIO.java     |    2 +
 .../transforms/FlatMapElementsJava8Test.java    |   10 +-
 .../sdk/transforms/MapElementsJava8Test.java    |   10 +-
 sdks/python/.pylintrc                           |    6 +-
 sdks/python/apache_beam/coders/coder_impl.py    |   50 +-
 sdks/python/apache_beam/coders/coders.py        |    6 +-
 .../examples/complete/game/user_score.py        |    8 +-
 .../examples/complete/top_wikipedia_sessions.py |    8 -
 .../examples/cookbook/group_with_coder.py       |    6 +-
 .../examples/snippets/snippets_test.py          |    3 +-
 sdks/python/apache_beam/examples/wordcount.py   |   48 +-
 .../apache_beam/examples/wordcount_debugging.py |    4 -
 sdks/python/apache_beam/internal/gcp/auth.py    |   80 +-
 .../apache_beam/internal/gcp/auth_test.py       |   44 -
 .../apache_beam/internal/gcp/json_value.py      |    6 -
 sdks/python/apache_beam/internal/pickler.py     |   20 +-
 sdks/python/apache_beam/io/avroio_test.py       |   14 +-
 sdks/python/apache_beam/io/concat_source.py     |   74 +-
 .../python/apache_beam/io/concat_source_test.py |   12 +-
 sdks/python/apache_beam/io/filebasedsource.py   |   54 +-
 .../apache_beam/io/filebasedsource_test.py      |   26 +-
 sdks/python/apache_beam/io/fileio.py            |   72 +-
 sdks/python/apache_beam/io/fileio_test.py       |   45 +-
 sdks/python/apache_beam/io/filesystem.py        |    3 +-
 sdks/python/apache_beam/io/filesystem_test.py   |  242 ++-
 sdks/python/apache_beam/io/filesystems_util.py  |   10 +-
 sdks/python/apache_beam/io/gcp/bigquery.py      |   38 +-
 .../io/gcp/datastore/v1/datastoreio.py          |   24 +-
 .../io/gcp/datastore/v1/datastoreio_test.py     |    4 +-
 .../apache_beam/io/gcp/datastore/v1/helper.py   |   16 +-
 .../io/gcp/datastore/v1/query_splitter.py       |    2 +-
 sdks/python/apache_beam/io/gcp/gcsfilesystem.py |    7 +-
 .../io/gcp/tests/bigquery_matcher.py            |    3 +-
 sdks/python/apache_beam/io/iobase.py            |    7 +-
 sdks/python/apache_beam/io/localfilesystem.py   |    3 +-
 sdks/python/apache_beam/io/range_trackers.py    |   19 +-
 sdks/python/apache_beam/io/source_test_utils.py |   79 +-
 .../apache_beam/io/source_test_utils_test.py    |   20 +-
 sdks/python/apache_beam/io/textio.py            |   16 +-
 sdks/python/apache_beam/io/textio_test.py       |   18 +-
 sdks/python/apache_beam/io/tfrecordio.py        |   12 +-
 sdks/python/apache_beam/metrics/cells.py        |   28 +-
 sdks/python/apache_beam/metrics/execution.py    |    3 +-
 sdks/python/apache_beam/metrics/metric.py       |    9 +-
 sdks/python/apache_beam/pipeline.py             |    4 +-
 sdks/python/apache_beam/pipeline_test.py        |   13 +-
 sdks/python/apache_beam/pvalue.py               |   16 +-
 sdks/python/apache_beam/runners/common.py       |    9 +-
 .../runners/dataflow/dataflow_metrics_test.py   |    3 +-
 .../runners/dataflow/dataflow_runner.py         |   39 +-
 .../runners/dataflow/dataflow_runner_test.py    |    2 +-
 .../runners/dataflow/internal/apiclient.py      |    7 +-
 .../runners/dataflow/internal/dependency.py     |    6 +-
 .../runners/dataflow/native_io/iobase_test.py   |    2 +-
 .../runners/dataflow/test_dataflow_runner.py    |   14 +-
 .../runners/direct/bundle_factory.py            |   14 +-
 .../consumer_tracking_pipeline_visitor_test.py  |   22 +-
 .../apache_beam/runners/direct/direct_runner.py |   27 +-
 .../runners/direct/evaluation_context.py        |   10 +-
 .../apache_beam/runners/direct/executor.py      |    9 +-
 .../runners/direct/transform_evaluator.py       |   38 -
 sdks/python/apache_beam/runners/runner.py       |   80 +-
 sdks/python/apache_beam/runners/runner_test.py  |   41 +
 .../apache_beam/tests/pipeline_verifiers.py     |    7 +-
 sdks/python/apache_beam/transforms/combiners.py |   62 +-
 .../apache_beam/transforms/combiners_test.py    |    4 +-
 sdks/python/apache_beam/transforms/core.py      |  192 ++-
 .../apache_beam/transforms/create_test.py       |  121 ++
 sdks/python/apache_beam/transforms/display.py   |    4 +-
 .../apache_beam/transforms/display_test.py      |   36 -
 .../python/apache_beam/transforms/ptransform.py |   38 +-
 .../apache_beam/transforms/ptransform_test.py   |   17 +-
 .../python/apache_beam/transforms/sideinputs.py |   11 +-
 .../apache_beam/transforms/sideinputs_test.py   |    6 +-
 sdks/python/apache_beam/transforms/trigger.py   |   26 +-
 .../apache_beam/transforms/trigger_test.py      |    6 +-
 sdks/python/apache_beam/typehints/decorators.py |   26 +-
 .../apache_beam/typehints/trivial_inference.py  |   26 +-
 .../typehints/trivial_inference_test.py         |    3 +-
 sdks/python/apache_beam/typehints/typecheck.py  |    7 +-
 sdks/python/apache_beam/typehints/typehints.py  |   66 +-
 .../apache_beam/typehints/typehints_test.py     |    7 +-
 .../apache_beam/utils/annotations_test.py       |    2 +-
 sdks/python/apache_beam/utils/path.py           |    3 +-
 .../apache_beam/utils/pipeline_options.py       |  109 +-
 .../apache_beam/utils/pipeline_options_test.py  |   52 +-
 sdks/python/apache_beam/utils/proto_utils.py    |   15 +-
 sdks/python/apache_beam/utils/retry.py          |   14 +-
 sdks/python/apache_beam/utils/timestamp.py      |    6 +-
 sdks/python/apache_beam/utils/value_provider.py |  110 --
 .../apache_beam/utils/value_provider_test.py    |  165 --
 sdks/python/apache_beam/utils/windowed_value.py |   17 +-
 sdks/python/run_postcommit.sh                   |    6 +-
 sdks/python/run_pylint.sh                       |    2 +-
 sdks/python/setup.py                            |    8 +-
 sdks/python/tox.ini                             |   18 +-
 490 files changed, 17307 insertions(+), 17743 deletions(-)
----------------------------------------------------------------------



[06/50] [abbrv] beam git commit: Rename DoFn.Context#sideOutput to output

Posted by ke...@apache.org.
Rename DoFn.Context#sideOutput to output

Having two methods, both named output, one of which takes the "main
output type" and one of which takes a tag to identify the target
collection, more clearly communicates the actual behavior: sideOutput
isn't a "special" way to output, it's the same as output(T), just
directed to a specified PCollection. This helps pipeline authors
understand the actual behavior of outputting to a tag, and
disentangles it from "sideInput", which is a special way to receive
input. Giving the methods the same name means that it's not even
strange to call output with the main output's tag, which is what we
want: tagged output is a more specific way to output, but it has no
different restrictions or capabilities.

Rename internal references to SideOutput, SideOutputT, etc to (largely)
AdditionalOutput(T).


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/113471d6
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/113471d6
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/113471d6

Branch: refs/heads/jstorm-runner
Commit: 113471d6457b4afa2523afc74b40be09935292d0
Parents: 89ff0b1
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 10 17:14:15 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Apr 14 14:40:57 2017 -0700

----------------------------------------------------------------------
 .../apex/translation/ParDoTranslator.java       |   6 +-
 .../operators/ApexGroupByKeyOperator.java       |  19 +-
 .../operators/ApexParDoOperator.java            |  48 +--
 .../apex/translation/utils/NoOpStepContext.java |   2 +-
 .../apex/translation/ParDoTranslatorTest.java   |  18 +-
 .../beam/runners/core/BaseExecutionContext.java |  13 +-
 .../apache/beam/runners/core/DoFnAdapters.java  |  16 +-
 .../apache/beam/runners/core/DoFnRunners.java   |   8 +-
 .../beam/runners/core/ExecutionContext.java     |  13 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |   6 +-
 .../org/apache/beam/runners/core/OldDoFn.java   |  38 +--
 ...eBoundedSplittableProcessElementInvoker.java |   8 +-
 .../beam/runners/core/OutputWindowedValue.java  |  10 +-
 .../beam/runners/core/SimpleDoFnRunner.java     |  54 ++--
 .../beam/runners/core/SimpleOldDoFnRunner.java  |  63 ++--
 .../beam/runners/core/SplittableParDo.java      |  21 +-
 .../beam/runners/core/WindowingInternals.java   |   8 +-
 .../core/WindowingInternalsAdapters.java        |   8 +-
 .../core/GroupAlsoByWindowsProperties.java      |  10 +-
 .../apache/beam/runners/core/NoOpOldDoFn.java   |   4 +-
 .../apache/beam/runners/core/OldDoFnTest.java   |   4 +-
 ...ndedSplittableProcessElementInvokerTest.java |   6 +-
 .../beam/runners/core/ReduceFnTester.java       |   8 +-
 .../runners/core/SimpleOldDoFnRunnerTest.java   |   4 +-
 .../beam/runners/core/SplittableParDoTest.java  |   8 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   8 +-
 .../beam/runners/direct/ParDoEvaluator.java     |   4 +-
 .../runners/direct/ParDoEvaluatorFactory.java   |  10 +-
 .../direct/ParDoMultiOverrideFactory.java       |   2 +-
 ...littableProcessElementsEvaluatorFactory.java |   8 +-
 .../direct/StatefulParDoEvaluatorFactory.java   |   2 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |   6 +-
 .../FlinkStreamingTransformTranslators.java     |  20 +-
 .../functions/FlinkDoFnFunction.java            |   4 +-
 .../functions/FlinkNoOpStepContext.java         |   2 +-
 .../functions/FlinkStatefulDoFnFunction.java    |   4 +-
 .../wrappers/streaming/DoFnOperator.java        |  14 +-
 .../streaming/SplittableDoFnOperator.java       |  10 +-
 .../wrappers/streaming/WindowDoFnOperator.java  |   4 +-
 .../flink/streaming/DoFnOperatorTest.java       |  34 +--
 .../dataflow/BatchStatefulParDoOverrides.java   |   2 +-
 .../runners/dataflow/BatchViewOverrides.java    |   6 +-
 .../dataflow/BatchViewOverridesTest.java        |   4 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |  10 +-
 .../spark/translation/MultiDoFnFunction.java    |   4 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   8 +-
 .../spark/translation/SparkProcessContext.java  |   2 +-
 .../streaming/StreamingTransformTranslator.java |   2 +-
 .../org/apache/beam/sdk/transforms/Combine.java |   2 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |  14 +-
 .../apache/beam/sdk/transforms/DoFnTester.java  |  41 ++-
 .../org/apache/beam/sdk/transforms/ParDo.java   |  77 +++--
 .../apache/beam/sdk/transforms/Partition.java   |   2 +-
 .../beam/sdk/values/PCollectionTuple.java       |   3 +-
 .../org/apache/beam/sdk/values/TupleTag.java    |  26 +-
 .../apache/beam/sdk/values/TupleTagList.java    |   2 +-
 .../org/apache/beam/sdk/values/TypedPValue.java |   4 +-
 .../apache/beam/sdk/metrics/MetricsTest.java    |   2 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   | 293 ++++++++++---------
 .../beam/sdk/transforms/SplittableDoFnTest.java |  21 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |   8 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |  46 +--
 .../beam/fn/harness/fake/FakeStepContext.java   |   2 +-
 .../control/ProcessBundleHandlerTest.java       |  30 +-
 .../sdk/io/gcp/bigquery/WritePartition.java     |   6 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |   4 +-
 66 files changed, 578 insertions(+), 578 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/ParDoTranslator.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/ParDoTranslator.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/ParDoTranslator.java
index 9213c1f..2e3d902 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/ParDoTranslator.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/ParDoTranslator.java
@@ -94,7 +94,7 @@ class ParDoTranslator<InputT, OutputT>
             context.getPipelineOptions(),
             doFn,
             transform.getMainOutputTag(),
-            transform.getSideOutputTags().getAll(),
+            transform.getAdditionalOutputTags().getAll(),
             input.getWindowingStrategy(),
             sideInputs,
             wvInputCoder,
@@ -114,9 +114,9 @@ class ParDoTranslator<InputT, OutputT>
         ports.put(pc, operator.output);
       } else {
         int portIndex = 0;
-        for (TupleTag<?> tag : transform.getSideOutputTags().getAll()) {
+        for (TupleTag<?> tag : transform.getAdditionalOutputTags().getAll()) {
           if (tag.equals(output.getKey())) {
-            ports.put(pc, operator.sideOutputPorts[portIndex]);
+            ports.put(pc, operator.additionalOutputPorts[portIndex]);
             break;
           }
           portIndex++;

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
index 230082e..1697921 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexGroupByKeyOperator.java
@@ -353,13 +353,14 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
         }
 
         @Override
-        public <SideOutputT> void sideOutputWindowedValue(
-            TupleTag<SideOutputT> tag,
-            SideOutputT output,
+        public <AdditionalOutputT> void outputWindowedValue(
+            TupleTag<AdditionalOutputT> tag,
+            AdditionalOutputT output,
             Instant timestamp,
             Collection<? extends BoundedWindow> windows,
             PaneInfo pane) {
-          throw new UnsupportedOperationException("GroupAlsoByWindow should not use side outputs");
+          throw new UnsupportedOperationException(
+              "GroupAlsoByWindow should not use tagged outputs");
         }
 
         @Override
@@ -390,15 +391,13 @@ public class ApexGroupByKeyOperator<K, V> implements Operator {
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      // ignore the side output, this can happen when a user does not register
-      // side outputs but then outputs using a freshly created TupleTag.
-      throw new RuntimeException("sideOutput() is not available when grouping by window.");
+    public <T> void output(TupleTag<T> tag, T output) {
+      throw new RuntimeException("output() is not available when grouping by window.");
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      sideOutput(tag, output);
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      output(tag, output);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexParDoOperator.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexParDoOperator.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexParDoOperator.java
index 1fc91c8..bad5be2 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexParDoOperator.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/operators/ApexParDoOperator.java
@@ -88,7 +88,7 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
   @Bind(JavaSerializer.class)
   private final TupleTag<OutputT> mainOutputTag;
   @Bind(JavaSerializer.class)
-  private final List<TupleTag<?>> sideOutputTags;
+  private final List<TupleTag<?>> additionalOutputTags;
   @Bind(JavaSerializer.class)
   private final WindowingStrategy<?, ?> windowingStrategy;
   @Bind(JavaSerializer.class)
@@ -108,15 +108,15 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
 
   private transient PushbackSideInputDoFnRunner<InputT, OutputT> pushbackDoFnRunner;
   private transient SideInputHandler sideInputHandler;
-  private transient Map<TupleTag<?>, DefaultOutputPort<ApexStreamTuple<?>>> sideOutputPortMapping =
-      Maps.newHashMapWithExpectedSize(5);
+  private transient Map<TupleTag<?>, DefaultOutputPort<ApexStreamTuple<?>>>
+      additionalOutputPortMapping = Maps.newHashMapWithExpectedSize(5);
   private transient DoFnInvoker<InputT, OutputT> doFnInvoker;
 
   public ApexParDoOperator(
       ApexPipelineOptions pipelineOptions,
       DoFn<InputT, OutputT> doFn,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       WindowingStrategy<?, ?> windowingStrategy,
       List<PCollectionView<?>> sideInputs,
       Coder<WindowedValue<InputT>> inputCoder,
@@ -125,15 +125,15 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
     this.pipelineOptions = new SerializablePipelineOptions(pipelineOptions);
     this.doFn = doFn;
     this.mainOutputTag = mainOutputTag;
-    this.sideOutputTags = sideOutputTags;
+    this.additionalOutputTags = additionalOutputTags;
     this.windowingStrategy = windowingStrategy;
     this.sideInputs = sideInputs;
     this.sideInputStateInternals = new StateInternalsProxy<>(
         stateBackend.newStateInternalsFactory(VoidCoder.of()));
 
-    if (sideOutputTags.size() > sideOutputPorts.length) {
-      String msg = String.format("Too many side outputs (currently only supporting %s).",
-          sideOutputPorts.length);
+    if (additionalOutputTags.size() > additionalOutputPorts.length) {
+      String msg = String.format("Too many additional outputs (currently only supporting %s).",
+          additionalOutputPorts.length);
       throw new UnsupportedOperationException(msg);
     }
 
@@ -148,7 +148,7 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
     this.pipelineOptions = null;
     this.doFn = null;
     this.mainOutputTag = null;
-    this.sideOutputTags = null;
+    this.additionalOutputTags = null;
     this.windowingStrategy = null;
     this.sideInputs = null;
     this.pushedBack = null;
@@ -218,29 +218,31 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
   public final transient DefaultOutputPort<ApexStreamTuple<?>> output = new DefaultOutputPort<>();
 
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ApexStreamTuple<?>> sideOutput1 =
+  public final transient DefaultOutputPort<ApexStreamTuple<?>> additionalOutput1 =
       new DefaultOutputPort<>();
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ApexStreamTuple<?>> sideOutput2 =
+  public final transient DefaultOutputPort<ApexStreamTuple<?>> additionalOutput2 =
       new DefaultOutputPort<>();
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ApexStreamTuple<?>> sideOutput3 =
+  public final transient DefaultOutputPort<ApexStreamTuple<?>> additionalOutput3 =
       new DefaultOutputPort<>();
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ApexStreamTuple<?>> sideOutput4 =
+  public final transient DefaultOutputPort<ApexStreamTuple<?>> additionalOutput4 =
       new DefaultOutputPort<>();
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ApexStreamTuple<?>> sideOutput5 =
+  public final transient DefaultOutputPort<ApexStreamTuple<?>> additionalOutput5 =
       new DefaultOutputPort<>();
 
-  public final transient DefaultOutputPort<?>[] sideOutputPorts = {sideOutput1, sideOutput2,
-      sideOutput3, sideOutput4, sideOutput5};
+  public final transient DefaultOutputPort<?>[] additionalOutputPorts = {
+    additionalOutput1, additionalOutput2, additionalOutput3, additionalOutput4, additionalOutput5
+  };
 
   @Override
   public <T> void output(TupleTag<T> tag, WindowedValue<T> tuple) {
-    DefaultOutputPort<ApexStreamTuple<?>> sideOutputPort = sideOutputPortMapping.get(tag);
-    if (sideOutputPort != null) {
-      sideOutputPort.emit(ApexStreamTuple.DataTuple.of(tuple));
+    DefaultOutputPort<ApexStreamTuple<?>> additionalOutputPort =
+        additionalOutputPortMapping.get(tag);
+    if (additionalOutputPort != null) {
+      additionalOutputPort.emit(ApexStreamTuple.DataTuple.of(tuple));
     } else {
       output.emit(ApexStreamTuple.DataTuple.of(tuple));
     }
@@ -306,11 +308,11 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
       sideInputReader = sideInputHandler;
     }
 
-    for (int i = 0; i < sideOutputTags.size(); i++) {
+    for (int i = 0; i < additionalOutputTags.size(); i++) {
       @SuppressWarnings("unchecked")
       DefaultOutputPort<ApexStreamTuple<?>> port = (DefaultOutputPort<ApexStreamTuple<?>>)
-          sideOutputPorts[i];
-      sideOutputPortMapping.put(sideOutputTags.get(i), port);
+          additionalOutputPorts[i];
+      additionalOutputPortMapping.put(additionalOutputTags.get(i), port);
     }
 
     NoOpStepContext stepContext = new NoOpStepContext() {
@@ -332,7 +334,7 @@ public class ApexParDoOperator<InputT, OutputT> extends BaseOperator implements
         sideInputReader,
         this,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         stepContext,
         new NoOpAggregatorFactory(),
         windowingStrategy

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/utils/NoOpStepContext.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/utils/NoOpStepContext.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/utils/NoOpStepContext.java
index ad4de97..cc64c7c 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/utils/NoOpStepContext.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/translation/utils/NoOpStepContext.java
@@ -48,7 +48,7 @@ public class NoOpStepContext implements ExecutionContext.StepContext, Serializab
   }
 
   @Override
-  public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {
+  public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/apex/src/test/java/org/apache/beam/runners/apex/translation/ParDoTranslatorTest.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/test/java/org/apache/beam/runners/apex/translation/ParDoTranslatorTest.java b/runners/apex/src/test/java/org/apache/beam/runners/apex/translation/ParDoTranslatorTest.java
index 2760d06..1a5c8be 100644
--- a/runners/apex/src/test/java/org/apache/beam/runners/apex/translation/ParDoTranslatorTest.java
+++ b/runners/apex/src/test/java/org/apache/beam/runners/apex/translation/ParDoTranslatorTest.java
@@ -267,7 +267,7 @@ public class ParDoTranslatorTest {
 
     List<Integer> inputs = Arrays.asList(3, -42, 666);
     final TupleTag<String> mainOutputTag = new TupleTag<>("main");
-    final TupleTag<Void> sideOutputTag = new TupleTag<>("sideOutput");
+    final TupleTag<Void> additionalOutputTag = new TupleTag<>("output");
 
     PCollectionView<Integer> sideInput1 = pipeline
         .apply("CreateSideInput1", Create.of(11))
@@ -288,10 +288,10 @@ public class ParDoTranslatorTest {
             .withSideInputs(sideInput1)
             .withSideInputs(sideInputUnread)
             .withSideInputs(sideInput2)
-            .withOutputTags(mainOutputTag, TupleTagList.of(sideOutputTag)));
+            .withOutputTags(mainOutputTag, TupleTagList.of(additionalOutputTag)));
 
     outputs.get(mainOutputTag).apply(ParDo.of(new EmbeddedCollector()));
-    outputs.get(sideOutputTag).setCoder(VoidCoder.of());
+    outputs.get(additionalOutputTag).setCoder(VoidCoder.of());
     ApexRunnerResult result = (ApexRunnerResult) pipeline.run();
 
     HashSet<String> expected = Sets.newHashSet("processing: 3: [11, 222]",
@@ -312,12 +312,12 @@ public class ParDoTranslatorTest {
     private static final long serialVersionUID = 1L;
 
     final List<PCollectionView<Integer>> sideInputViews = new ArrayList<>();
-    final List<TupleTag<String>> sideOutputTupleTags = new ArrayList<>();
+    final List<TupleTag<String>> additionalOutputTupleTags = new ArrayList<>();
 
     public TestMultiOutputWithSideInputsFn(List<PCollectionView<Integer>> sideInputViews,
-        List<TupleTag<String>> sideOutputTupleTags) {
+        List<TupleTag<String>> additionalOutputTupleTags) {
       this.sideInputViews.addAll(sideInputViews);
-      this.sideOutputTupleTags.addAll(sideOutputTupleTags);
+      this.additionalOutputTupleTags.addAll(additionalOutputTupleTags);
     }
 
     @ProcessElement
@@ -334,9 +334,9 @@ public class ParDoTranslatorTest {
         value += ": " + sideInputValues;
       }
       c.output(value);
-      for (TupleTag<String> sideOutputTupleTag : sideOutputTupleTags) {
-        c.sideOutput(sideOutputTupleTag,
-                     sideOutputTupleTag.getId() + ": " + value);
+      for (TupleTag<String> additionalOutputTupleTag : additionalOutputTupleTags) {
+        c.output(additionalOutputTupleTag,
+                     additionalOutputTupleTag.getId() + ": " + value);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/BaseExecutionContext.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/BaseExecutionContext.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/BaseExecutionContext.java
index 0f23fea..cc7b574 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/BaseExecutionContext.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/BaseExecutionContext.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn.Context;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
@@ -106,19 +107,17 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@code DoFn.Context#output}
-   * is called.
+   * {@link Context#output(Object)} is called.
    */
   @Override
   public void noteOutput(WindowedValue<?> output) {}
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@code DoFn.Context#sideOutput}
-   * is called.
+   * {@link Context#output(TupleTag, Object)} is called.
    */
   @Override
-  public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {}
+  public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {}
 
   /**
    * Base class for implementations of {@link ExecutionContext.StepContext}.
@@ -153,8 +152,8 @@ public abstract class BaseExecutionContext<T extends ExecutionContext.StepContex
     }
 
     @Override
-    public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {
-      executionContext.noteSideOutput(tag, output);
+    public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {
+      executionContext.noteOutput(tag, output);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
index deb3b7e..66ad736 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnAdapters.java
@@ -162,13 +162,13 @@ public class DoFnAdapters {
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      context.sideOutput(tag, output);
+    public <T> void output(TupleTag<T> tag, T output) {
+      context.output(tag, output);
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      context.sideOutputWithTimestamp(tag, output, timestamp);
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      context.outputWithTimestamp(tag, output, timestamp);
     }
 
     @Override
@@ -255,13 +255,13 @@ public class DoFnAdapters {
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      context.sideOutput(tag, output);
+    public <T> void output(TupleTag<T> tag, T output) {
+      context.output(tag, output);
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      context.sideOutputWithTimestamp(tag, output, timestamp);
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      context.outputWithTimestamp(tag, output, timestamp);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
index a1b7c8b..b09ee08 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/DoFnRunners.java
@@ -59,7 +59,7 @@ public class DoFnRunners {
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       StepContext stepContext,
       AggregatorFactory aggregatorFactory,
       WindowingStrategy<?, ?> windowingStrategy) {
@@ -69,7 +69,7 @@ public class DoFnRunners {
         sideInputReader,
         outputManager,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         stepContext,
         aggregatorFactory,
         windowingStrategy);
@@ -86,7 +86,7 @@ public class DoFnRunners {
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       StepContext stepContext,
       AggregatorFactory aggregatorFactory,
       WindowingStrategy<?, ?> windowingStrategy) {
@@ -96,7 +96,7 @@ public class DoFnRunners {
         sideInputReader,
         outputManager,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         stepContext,
         aggregatorFactory,
         windowingStrategy);

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/ExecutionContext.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/ExecutionContext.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/ExecutionContext.java
index 40c0798..ecd30c0 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/ExecutionContext.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/ExecutionContext.java
@@ -20,6 +20,7 @@ package org.apache.beam.runners.core;
 import java.io.IOException;
 import java.util.Collection;
 import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.transforms.DoFn.Context;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
@@ -41,17 +42,15 @@ public interface ExecutionContext {
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
-   * is called.
+   * {@link Context#output(Object)} is called.
    */
   void noteOutput(WindowedValue<?> output);
 
   /**
    * Hook for subclasses to implement that will be called whenever
-   * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
-   * is called.
+   * {@link Context#output(TupleTag, Object)} is called.
    */
-  void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);
+  void noteOutput(TupleTag<?> tag, WindowedValue<?> output);
 
   /**
    * Per-step, per-key context used for retrieving state.
@@ -77,10 +76,10 @@ public interface ExecutionContext {
 
     /**
      * Hook for subclasses to implement that will be called whenever
-     * {@link org.apache.beam.sdk.transforms.DoFn.Context#sideOutput}
+     * {@link org.apache.beam.sdk.transforms.DoFn.Context#output}
      * is called.
      */
-    void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output);
+    void noteOutput(TupleTag<?> tag, WindowedValue<?> output);
 
     /**
      * Writes the given {@code PCollectionView} data to a globally accessible location.

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
index 8fff0e4..0cf6e2d 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/GroupAlsoByWindowViaWindowSetNewDoFn.java
@@ -104,9 +104,9 @@ public class GroupAlsoByWindowViaWindowSetNewDoFn<
       }
 
       @Override
-      public <SideOutputT> void sideOutputWindowedValue(
-              TupleTag<SideOutputT> tag,
-              SideOutputT output,
+      public <AdditionalOutputT> void outputWindowedValue(
+              TupleTag<AdditionalOutputT> tag,
+              AdditionalOutputT output,
               Instant timestamp,
               Collection<? extends BoundedWindow> windows,
               PaneInfo pane) {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
index e9d4740..507ee50 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/OldDoFn.java
@@ -135,16 +135,15 @@ public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDispl
     public abstract void outputWithTimestamp(OutputT output, Instant timestamp);
 
     /**
-     * Adds the given element to the side output {@code PCollection} with the
+     * Adds the given element to the output {@code PCollection} with the
      * given tag.
      *
-     * <p>Once passed to {@code sideOutput} the element should not be modified
+     * <p>Once passed to {@code output} the element should not be modified
      * in any way.
      *
      * <p>The caller of {@code ParDo} uses {@link ParDo.SingleOutput#withOutputTags withOutputTags}
-     * to specify the tags of side outputs that it consumes. Non-consumed side
-     * outputs, e.g., outputs for monitoring purposes only, don't necessarily
-     * need to be specified.
+     * to specify the tags of outputs that it consumes. Outputs that are not consumed, e.g., outputs
+     * for monitoring purposes only, don't necessarily need to be specified.
      *
      * <p>The output element will have the same timestamp and be in the same
      * windows as the input element passed to {@link OldDoFn#processElement processElement}.
@@ -159,32 +158,27 @@ public abstract class OldDoFn<InputT, OutputT> implements Serializable, HasDispl
      *
      * @see ParDo.SingleOutput#withOutputTags
      */
-    public abstract <T> void sideOutput(TupleTag<T> tag, T output);
+    public abstract <T> void output(TupleTag<T> tag, T output);
 
     /**
-     * Adds the given element to the specified side output {@code PCollection},
-     * with the given timestamp.
+     * Adds the given element to the specified output {@code PCollection}, with the given timestamp.
      *
-     * <p>Once passed to {@code sideOutputWithTimestamp} the element should not be
-     * modified in any way.
+     * <p>Once passed to {@code outputWithTimestamp} the element should not be modified in any way.
      *
-     * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp
-     * must not be older than the input element's timestamp minus
-     * {@link OldDoFn#getAllowedTimestampSkew getAllowedTimestampSkew}.  The output element will
-     * be in the same windows as the input element.
+     * <p>If invoked from {@link OldDoFn#processElement processElement}, the timestamp must not be
+     * older than the input element's timestamp minus {@link OldDoFn#getAllowedTimestampSkew
+     * getAllowedTimestampSkew}. The output element will be in the same windows as the input
+     * element.
      *
      * <p>If invoked from {@link #startBundle startBundle} or {@link #finishBundle finishBundle},
-     * this will attempt to use the
-     * {@link org.apache.beam.sdk.transforms.windowing.WindowFn}
-     * of the input {@code PCollection} to determine what windows the element
-     * should be in, throwing an exception if the {@code WindowFn} attempts
-     * to access any information about the input element except for the
-     * timestamp.
+     * this will attempt to use the {@link org.apache.beam.sdk.transforms.windowing.WindowFn} of the
+     * input {@code PCollection} to determine what windows the element should be in, throwing an
+     * exception if the {@code WindowFn} attempts to access any information about the input element
+     * except for the timestamp.
      *
      * @see ParDo.SingleOutput#withOutputTags
      */
-    public abstract <T> void sideOutputWithTimestamp(
-        TupleTag<T> tag, T output, Instant timestamp);
+    public abstract <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp);
 
     /**
      * Creates an {@link Aggregator} in the {@link OldDoFn} context with the

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java
index 27fd0a3..d132af6 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvoker.java
@@ -254,13 +254,13 @@ public class OutputAndTimeBoundedSplittableProcessElementInvoker<
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T value) {
-      sideOutputWithTimestamp(tag, value, element.getTimestamp());
+    public <T> void output(TupleTag<T> tag, T value) {
+      outputWithTimestamp(tag, value, element.getTimestamp());
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T value, Instant timestamp) {
-      output.sideOutputWindowedValue(
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T value, Instant timestamp) {
+      output.outputWindowedValue(
           tag, value, timestamp, element.getWindows(), element.getPane());
       noteOutput();
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputWindowedValue.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputWindowedValue.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputWindowedValue.java
index 86eeb33..35d6737 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputWindowedValue.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/OutputWindowedValue.java
@@ -25,7 +25,7 @@ import org.joda.time.Instant;
 
 /**
  * An object that can output a value with all of its windowing information to the main output or
- * a side output.
+ * any tagged output.
  */
 public interface OutputWindowedValue<OutputT> {
   /** Outputs a value with windowing information to the main output. */
@@ -35,10 +35,10 @@ public interface OutputWindowedValue<OutputT> {
       Collection<? extends BoundedWindow> windows,
       PaneInfo pane);
 
-  /** Outputs a value with windowing information to a side output. */
-  <SideOutputT> void sideOutputWindowedValue(
-      TupleTag<SideOutputT> tag,
-      SideOutputT output,
+  /** Outputs a value with windowing information to a tagged output. */
+  <AdditionalOutputT> void outputWindowedValue(
+      TupleTag<AdditionalOutputT> tag,
+      AdditionalOutputT output,
       Instant timestamp,
       Collection<? extends BoundedWindow> windows,
       PaneInfo pane);

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java
index 98d88b6..141bf20 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleDoFnRunner.java
@@ -106,7 +106,7 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
       SideInputReader sideInputReader,
       OutputManager outputManager,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       StepContext stepContext,
       AggregatorFactory aggregatorFactory,
       WindowingStrategy<?, ?> windowingStrategy) {
@@ -133,7 +133,7 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
             sideInputReader,
             outputManager,
             mainOutputTag,
-            sideOutputTags,
+            additionalOutputTags,
             stepContext,
             aggregatorFactory,
             windowingStrategy.getWindowFn());
@@ -257,7 +257,7 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
         SideInputReader sideInputReader,
         OutputManager outputManager,
         TupleTag<OutputT> mainOutputTag,
-        List<TupleTag<?>> sideOutputTags,
+        List<TupleTag<?>> additionalOutputTags,
         StepContext stepContext,
         AggregatorFactory aggregatorFactory,
         WindowFn<?, ?> windowFn) {
@@ -270,8 +270,8 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
       this.outputTags = Sets.newHashSet();
 
       outputTags.add(mainOutputTag);
-      for (TupleTag<?> sideOutputTag : sideOutputTags) {
-        outputTags.add(sideOutputTag);
+      for (TupleTag<?> additionalOutputTag : additionalOutputTags) {
+        outputTags.add(additionalOutputTag);
       }
 
       this.stepContext = stepContext;
@@ -355,16 +355,16 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
       }
     }
 
-    private <T> void sideOutputWindowedValue(
+    private <T> void outputWindowedValue(
         TupleTag<T> tag,
         T output,
         Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {
-      sideOutputWindowedValue(tag, makeWindowedValue(output, timestamp, windows, pane));
+      outputWindowedValue(tag, makeWindowedValue(output, timestamp, windows, pane));
     }
 
-    private <T> void sideOutputWindowedValue(TupleTag<T> tag, WindowedValue<T> windowedElem) {
+    private <T> void outputWindowedValue(TupleTag<T> tag, WindowedValue<T> windowedElem) {
       if (!outputTags.contains(tag)) {
         // This tag wasn't declared nor was it seen before during this execution.
         // Thus, this must be a new, undeclared and unconsumed output.
@@ -372,18 +372,18 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
         // outputs.
         if (outputTags.size() >= MAX_SIDE_OUTPUTS) {
           throw new IllegalArgumentException(
-              "the number of side outputs has exceeded a limit of " + MAX_SIDE_OUTPUTS);
+              "the number of outputs has exceeded a limit of " + MAX_SIDE_OUTPUTS);
         }
         outputTags.add(tag);
       }
 
       outputManager.output(tag, windowedElem);
       if (stepContext != null) {
-        stepContext.noteSideOutput(tag, windowedElem);
+        stepContext.noteOutput(tag, windowedElem);
       }
     }
 
-    // Following implementations of output, outputWithTimestamp, and sideOutput
+    // Following implementations of output and outputWithTimestamp
     // are only accessible in DoFn.startBundle and DoFn.finishBundle, and will be shadowed by
     // ProcessContext's versions in DoFn.processElement.
     @Override
@@ -397,15 +397,15 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      checkNotNull(tag, "TupleTag passed to sideOutput cannot be null");
-      sideOutputWindowedValue(tag, output, null, null, PaneInfo.NO_FIRING);
+    public <T> void output(TupleTag<T> tag, T output) {
+      checkNotNull(tag, "TupleTag passed to output cannot be null");
+      outputWindowedValue(tag, output, null, null, PaneInfo.NO_FIRING);
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      checkNotNull(tag, "TupleTag passed to sideOutputWithTimestamp cannot be null");
-      sideOutputWindowedValue(tag, output, timestamp, null, PaneInfo.NO_FIRING);
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      checkNotNull(tag, "TupleTag passed to outputWithTimestamp cannot be null");
+      outputWindowedValue(tag, output, timestamp, null, PaneInfo.NO_FIRING);
     }
 
     @Override
@@ -559,16 +559,16 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      checkNotNull(tag, "Tag passed to sideOutput cannot be null");
-      context.sideOutputWindowedValue(tag, windowedValue.withValue(output));
+    public <T> void output(TupleTag<T> tag, T output) {
+      checkNotNull(tag, "Tag passed to output cannot be null");
+      context.outputWindowedValue(tag, windowedValue.withValue(output));
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      checkNotNull(tag, "Tag passed to sideOutputWithTimestamp cannot be null");
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      checkNotNull(tag, "Tag passed to outputWithTimestamp cannot be null");
       checkTimestamp(timestamp);
-      context.sideOutputWindowedValue(
+      context.outputWindowedValue(
           tag, output, timestamp, windowedValue.getWindows(), windowedValue.getPane());
     }
 
@@ -787,14 +787,14 @@ public class SimpleDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, Out
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      context.sideOutputWindowedValue(
+    public <T> void output(TupleTag<T> tag, T output) {
+      context.outputWindowedValue(
           tag, output, timestamp, Collections.singleton(window()), PaneInfo.NO_FIRING);
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      context.sideOutputWindowedValue(
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      context.outputWindowedValue(
           tag, output, timestamp, Collections.singleton(window()), PaneInfo.NO_FIRING);
     }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleOldDoFnRunner.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleOldDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleOldDoFnRunner.java
index c88f1c9..6320a3a 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleOldDoFnRunner.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SimpleOldDoFnRunner.java
@@ -60,11 +60,16 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
   /** The context used for running the {@link OldDoFn}. */
   private final DoFnContext<InputT, OutputT> context;
 
-  public SimpleOldDoFnRunner(PipelineOptions options, OldDoFn<InputT, OutputT> fn,
+  public SimpleOldDoFnRunner(
+      PipelineOptions options,
+      OldDoFn<InputT, OutputT> fn,
       SideInputReader sideInputReader,
       OutputManager outputManager,
-      TupleTag<OutputT> mainOutputTag, List<TupleTag<?>> sideOutputTags, StepContext stepContext,
-      AggregatorFactory aggregatorFactory, WindowingStrategy<?, ?> windowingStrategy) {
+      TupleTag<OutputT> mainOutputTag,
+      List<TupleTag<?>> additionalOutputTags,
+      StepContext stepContext,
+      AggregatorFactory aggregatorFactory,
+      WindowingStrategy<?, ?> windowingStrategy) {
     this.fn = fn;
     this.context = new DoFnContext<>(
         options,
@@ -72,7 +77,7 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
         sideInputReader,
         outputManager,
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         stepContext,
         aggregatorFactory,
         windowingStrategy == null ? null : windowingStrategy.getWindowFn());
@@ -177,7 +182,7 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
                        SideInputReader sideInputReader,
                        OutputManager outputManager,
                        TupleTag<OutputT> mainOutputTag,
-                       List<TupleTag<?>> sideOutputTags,
+                       List<TupleTag<?>> additionalOutputTags,
                        StepContext stepContext,
                        AggregatorFactory aggregatorFactory,
                        WindowFn<?, ?> windowFn) {
@@ -190,8 +195,8 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
       this.outputTags = Sets.newHashSet();
 
       outputTags.add(mainOutputTag);
-      for (TupleTag<?> sideOutputTag : sideOutputTags) {
-        outputTags.add(sideOutputTag);
+      for (TupleTag<?> additionalOutputTag : additionalOutputTags) {
+        outputTags.add(additionalOutputTag);
       }
 
       this.stepContext = stepContext;
@@ -273,15 +278,15 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
       }
     }
 
-    private <T> void sideOutputWindowedValue(TupleTag<T> tag,
+    private <T> void outputWindowedValue(TupleTag<T> tag,
                                                T output,
                                                Instant timestamp,
                                                Collection<? extends BoundedWindow> windows,
                                                PaneInfo pane) {
-      sideOutputWindowedValue(tag, makeWindowedValue(output, timestamp, windows, pane));
+      outputWindowedValue(tag, makeWindowedValue(output, timestamp, windows, pane));
     }
 
-    private <T> void sideOutputWindowedValue(TupleTag<T> tag, WindowedValue<T> windowedElem) {
+    private <T> void outputWindowedValue(TupleTag<T> tag, WindowedValue<T> windowedElem) {
       if (!outputTags.contains(tag)) {
         // This tag wasn't declared nor was it seen before during this execution.
         // Thus, this must be a new, undeclared and unconsumed output.
@@ -289,18 +294,18 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
         // outputs.
         if (outputTags.size() >= MAX_SIDE_OUTPUTS) {
           throw new IllegalArgumentException(
-              "the number of side outputs has exceeded a limit of " + MAX_SIDE_OUTPUTS);
+              "the number of outputs has exceeded a limit of " + MAX_SIDE_OUTPUTS);
         }
         outputTags.add(tag);
       }
 
       outputManager.output(tag, windowedElem);
       if (stepContext != null) {
-        stepContext.noteSideOutput(tag, windowedElem);
+        stepContext.noteOutput(tag, windowedElem);
       }
     }
 
-    // Following implementations of output, outputWithTimestamp, and sideOutput
+    // Following implementations of output, outputWithTimestamp, and their tagged variants
     // are only accessible in OldDoFn.startBundle and OldDoFn.finishBundle, and will be shadowed by
     // ProcessContext's versions in OldDoFn.processElement.
     @Override
@@ -314,15 +319,15 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      checkNotNull(tag, "TupleTag passed to sideOutput cannot be null");
-      sideOutputWindowedValue(tag, output, null, null, PaneInfo.NO_FIRING);
+    public <T> void output(TupleTag<T> tag, T output) {
+      checkNotNull(tag, "TupleTag passed to output cannot be null");
+      outputWindowedValue(tag, output, null, null, PaneInfo.NO_FIRING);
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      checkNotNull(tag, "TupleTag passed to sideOutputWithTimestamp cannot be null");
-      sideOutputWindowedValue(tag, output, timestamp, null, PaneInfo.NO_FIRING);
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      checkNotNull(tag, "TupleTag passed to outputWithTimestamp cannot be null");
+      outputWindowedValue(tag, output, timestamp, null, PaneInfo.NO_FIRING);
     }
 
     @Override
@@ -428,16 +433,16 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
-      checkNotNull(tag, "Tag passed to sideOutput cannot be null");
-      context.sideOutputWindowedValue(tag, windowedValue.withValue(output));
+    public <T> void output(TupleTag<T> tag, T output) {
+      checkNotNull(tag, "Tag passed to output cannot be null");
+      context.outputWindowedValue(tag, windowedValue.withValue(output));
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
-      checkNotNull(tag, "Tag passed to sideOutputWithTimestamp cannot be null");
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      checkNotNull(tag, "Tag passed to outputWithTimestamp cannot be null");
       checkTimestamp(timestamp);
-      context.sideOutputWindowedValue(
+      context.outputWindowedValue(
           tag, output, timestamp, windowedValue.getWindows(), windowedValue.getPane());
     }
 
@@ -471,13 +476,13 @@ class SimpleOldDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT
         }
 
         @Override
-        public <SideOutputT> void sideOutputWindowedValue(
-            TupleTag<SideOutputT> tag,
-            SideOutputT output,
+        public <AdditionalOutputT> void outputWindowedValue(
+            TupleTag<AdditionalOutputT> tag,
+            AdditionalOutputT output,
             Instant timestamp,
             Collection<? extends BoundedWindow> windows,
             PaneInfo pane) {
-          context.sideOutputWindowedValue(tag, output, timestamp, windows, pane);
+          context.outputWindowedValue(tag, output, timestamp, windows, pane);
         }
 
         @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDo.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDo.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDo.java
index c16bf44..9cc965a 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDo.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SplittableParDo.java
@@ -118,7 +118,7 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
             input.getWindowingStrategy(),
             parDo.getSideInputs(),
             parDo.getMainOutputTag(),
-            parDo.getSideOutputTags()));
+            parDo.getAdditionalOutputTags()));
   }
 
   private static <InputT, OutputT, RestrictionT>
@@ -188,14 +188,15 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
     private final WindowingStrategy<?, ?> windowingStrategy;
     private final List<PCollectionView<?>> sideInputs;
     private final TupleTag<OutputT> mainOutputTag;
-    private final TupleTagList sideOutputTags;
+    private final TupleTagList additionalOutputTags;
 
     /**
      * @param fn the splittable {@link DoFn}.
      * @param windowingStrategy the {@link WindowingStrategy} of the input collection.
      * @param sideInputs list of side inputs that should be available to the {@link DoFn}.
      * @param mainOutputTag {@link TupleTag Tag} of the {@link DoFn DoFn's} main output.
-     * @param sideOutputTags {@link TupleTagList Tags} of the {@link DoFn DoFn's} side outputs.
+     * @param additionalOutputTags {@link TupleTagList Tags} of the {@link DoFn DoFn's} additional
+     *     outputs.
      */
     public ProcessElements(
         DoFn<InputT, OutputT> fn,
@@ -204,14 +205,14 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
         WindowingStrategy<?, ?> windowingStrategy,
         List<PCollectionView<?>> sideInputs,
         TupleTag<OutputT> mainOutputTag,
-        TupleTagList sideOutputTags) {
+        TupleTagList additionalOutputTags) {
       this.fn = fn;
       this.elementCoder = elementCoder;
       this.restrictionCoder = restrictionCoder;
       this.windowingStrategy = windowingStrategy;
       this.sideInputs = sideInputs;
       this.mainOutputTag = mainOutputTag;
-      this.sideOutputTags = sideOutputTags;
+      this.additionalOutputTags = additionalOutputTags;
     }
 
     public DoFn<InputT, OutputT> getFn() {
@@ -226,8 +227,8 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
       return mainOutputTag;
     }
 
-    public TupleTagList getSideOutputTags() {
-      return sideOutputTags;
+    public TupleTagList getAdditionalOutputTags() {
+      return additionalOutputTags;
     }
 
     public ProcessFn<InputT, OutputT, RestrictionT, TrackerT> newProcessFn(
@@ -244,7 +245,7 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
       PCollectionTuple outputs =
           PCollectionTuple.ofPrimitiveOutputsInternal(
               input.getPipeline(),
-              TupleTagList.of(mainOutputTag).and(sideOutputTags.getAll()),
+              TupleTagList.of(mainOutputTag).and(additionalOutputTags.getAll()),
               windowingStrategy,
               input.isBounded().and(signature.isBoundedPerElement()));
 
@@ -522,12 +523,12 @@ public class SplittableParDo<InputT, OutputT, RestrictionT>
         }
 
         @Override
-        public <T> void sideOutput(TupleTag<T> tag, T output) {
+        public <T> void output(TupleTag<T> tag, T output) {
           throwUnsupportedOutput();
         }
 
         @Override
-        public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+        public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
           throwUnsupportedOutput();
         }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternals.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternals.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternals.java
index 8dc0bfc..5005065 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternals.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternals.java
@@ -49,11 +49,11 @@ public interface WindowingInternals<InputT, OutputT> {
       Collection<? extends BoundedWindow> windows, PaneInfo pane);
 
   /**
-   * Output the value to a side output at the specified timestamp in the listed windows.
+   * Output the value to a tagged output at the specified timestamp in the listed windows.
    */
-  <SideOutputT> void sideOutputWindowedValue(
-      TupleTag<SideOutputT> tag,
-      SideOutputT output,
+  <AdditionalOutputT> void outputWindowedValue(
+      TupleTag<AdditionalOutputT> tag,
+      AdditionalOutputT output,
       Instant timestamp,
       Collection<? extends BoundedWindow> windows,
       PaneInfo pane);

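For pipeline authors, the user-visible effect of this rename is that additional outputs are emitted with output(tag, value) instead of sideOutput(tag, value). A minimal sketch of declaring and consuming a tagged output under the renamed API, assuming words is a PCollection<String>; the tag names and element types here are illustrative and not taken from this commit:

    final TupleTag<String> mainTag = new TupleTag<String>() {};
    final TupleTag<Integer> lengthTag = new TupleTag<Integer>() {};

    PCollectionTuple results = words.apply(ParDo
        .of(new DoFn<String, String>() {
          @ProcessElement
          public void processElement(ProcessContext c) {
            c.output(c.element());                      // main output
            c.output(lengthTag, c.element().length());  // additional (tagged) output
          }
        })
        .withOutputTags(mainTag, TupleTagList.of(lengthTag)));

    PCollection<Integer> lengths = results.get(lengthTag);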
http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternalsAdapters.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternalsAdapters.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternalsAdapters.java
index 48a39d6..1b36bf9 100644
--- a/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternalsAdapters.java
+++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/WindowingInternalsAdapters.java
@@ -62,13 +62,13 @@ public class WindowingInternalsAdapters {
       }
 
       @Override
-      public <SideOutputT> void sideOutputWindowedValue(
-          TupleTag<SideOutputT> tag,
-          SideOutputT output,
+      public <AdditionalOutputT> void outputWindowedValue(
+          TupleTag<AdditionalOutputT> tag,
+          AdditionalOutputT output,
           Instant timestamp,
           Collection<? extends BoundedWindow> windows,
           PaneInfo pane) {
-        windowingInternals.sideOutputWindowedValue(tag, output, timestamp, windows, pane);
+        windowingInternals.outputWindowedValue(tag, output, timestamp, windows, pane);
       }
     };
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
index 6c7c4e0..d0a8923 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/GroupAlsoByWindowsProperties.java
@@ -677,9 +677,9 @@ public class GroupAlsoByWindowsProperties {
         }
 
         @Override
-        public <SideOutputT> void sideOutputWindowedValue(
-            TupleTag<SideOutputT> tag,
-            SideOutputT output,
+        public <AdditionalOutputT> void outputWindowedValue(
+            TupleTag<AdditionalOutputT> tag,
+            AdditionalOutputT output,
             Instant timestamp,
             Collection<? extends BoundedWindow> windows,
             PaneInfo pane) {
@@ -729,12 +729,12 @@ public class GroupAlsoByWindowsProperties {
     }
 
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
+    public <T> void output(TupleTag<T> tag, T output) {
       throw new UnsupportedOperationException();
     }
 
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
       throw new UnsupportedOperationException();
     }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/NoOpOldDoFn.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/NoOpOldDoFn.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/NoOpOldDoFn.java
index 5cbea8c..2e5cd6d 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/NoOpOldDoFn.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/NoOpOldDoFn.java
@@ -57,10 +57,10 @@ class NoOpOldDoFn<InputT, OutputT> extends OldDoFn<InputT, OutputT> {
     public void outputWithTimestamp(OutputT output, Instant timestamp) {
     }
     @Override
-    public <T> void sideOutput(TupleTag<T> tag, T output) {
+    public <T> void output(TupleTag<T> tag, T output) {
     }
     @Override
-    public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output,
+    public <T> void outputWithTimestamp(TupleTag<T> tag, T output,
         Instant timestamp) {
     }
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/OldDoFnTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/OldDoFnTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/OldDoFnTest.java
index 651bc72..425de07 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/OldDoFnTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/OldDoFnTest.java
@@ -160,12 +160,12 @@ public class OldDoFnTest implements Serializable {
       }
 
       @Override
-      public <T> void sideOutput(TupleTag<T> tag, T output) {
+      public <T> void output(TupleTag<T> tag, T output) {
         throw new UnsupportedOperationException();
       }
 
       @Override
-      public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
+      public <T> void outputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
         throw new UnsupportedOperationException();
       }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvokerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvokerTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvokerTest.java
index 965380b..541e238 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvokerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/OutputAndTimeBoundedSplittableProcessElementInvokerTest.java
@@ -80,9 +80,9 @@ public class OutputAndTimeBoundedSplittableProcessElementInvokerTest {
                   PaneInfo pane) {}
 
               @Override
-              public <SideOutputT> void sideOutputWindowedValue(
-                  TupleTag<SideOutputT> tag,
-                  SideOutputT output,
+              public <AdditionalOutputT> void outputWindowedValue(
+                  TupleTag<AdditionalOutputT> tag,
+                  AdditionalOutputT output,
                   Instant timestamp,
                   Collection<? extends BoundedWindow> windows,
                   PaneInfo pane) {}

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
index 512420f..914550e 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/ReduceFnTester.java
@@ -574,13 +574,13 @@ public class ReduceFnTester<InputT, OutputT, W extends BoundedWindow> {
     }
 
     @Override
-    public <SideOutputT> void sideOutputWindowedValue(
-        TupleTag<SideOutputT> tag,
-        SideOutputT output,
+    public <AdditionalOutputT> void outputWindowedValue(
+        TupleTag<AdditionalOutputT> tag,
+        AdditionalOutputT output,
         Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {
-      throw new UnsupportedOperationException("GroupAlsoByWindow should not use side outputs");
+      throw new UnsupportedOperationException("GroupAlsoByWindow should not use tagged outputs");
     }
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleOldDoFnRunnerTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleOldDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleOldDoFnRunnerTest.java
index 28698ca..8ded2dc 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleOldDoFnRunnerTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleOldDoFnRunnerTest.java
@@ -64,10 +64,10 @@ public class SimpleOldDoFnRunnerTest {
 
   private DoFnRunner<String, String> createRunner(OldDoFn<String, String> fn) {
     // Pass in only necessary parameters for the test
-    List<TupleTag<?>> sideOutputTags = Arrays.asList();
+    List<TupleTag<?>> additionalOutputTags = Arrays.asList();
     StepContext context = mock(StepContext.class);
     return new SimpleOldDoFnRunner<>(
-          null, fn, null, null, null, sideOutputTags, context, null, null);
+        null, fn, null, null, null, additionalOutputTags, context, null, null);
   }
 
   static class ThrowingDoFn extends OldDoFn<String, String> {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/core-java/src/test/java/org/apache/beam/runners/core/SplittableParDoTest.java
----------------------------------------------------------------------
diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/SplittableParDoTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/SplittableParDoTest.java
index d301113..2c89543 100644
--- a/runners/core-java/src/test/java/org/apache/beam/runners/core/SplittableParDoTest.java
+++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/SplittableParDoTest.java
@@ -356,13 +356,13 @@ public class SplittableParDoTest {
         Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {
-      sideOutputWindowedValue(tester.getMainOutputTag(), output, timestamp, windows, pane);
+      outputWindowedValue(tester.getMainOutputTag(), output, timestamp, windows, pane);
     }
 
     @Override
-    public <SideOutputT> void sideOutputWindowedValue(
-        TupleTag<SideOutputT> tag,
-        SideOutputT output,
+    public <AdditionalOutputT> void outputWindowedValue(
+        TupleTag<AdditionalOutputT> tag,
+        AdditionalOutputT output,
         Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
index b4ca998..ce7b12a 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java
@@ -264,13 +264,13 @@ class GroupAlsoByWindowEvaluatorFactory implements TransformEvaluatorFactory {
     }
 
     @Override
-    public <SideOutputT> void sideOutputWindowedValue(
-        TupleTag<SideOutputT> tag,
-        SideOutputT output,
+    public <AdditionalOutputT> void outputWindowedValue(
+        TupleTag<AdditionalOutputT> tag,
+        AdditionalOutputT output,
         Instant timestamp,
         Collection<? extends BoundedWindow> windows,
         PaneInfo pane) {
-      throw new UnsupportedOperationException("GroupAlsoByWindow should not use side outputs");
+      throw new UnsupportedOperationException("GroupAlsoByWindow should not use tagged outputs");
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
index 328d139..49d0723 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java
@@ -51,7 +51,7 @@ class ParDoEvaluator<InputT, OutputT> implements TransformEvaluator<InputT> {
       StructuralKey<?> key,
       List<PCollectionView<?>> sideInputs,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       Map<TupleTag<?>, PCollection<?>> outputs) {
     AggregatorContainer.Mutator aggregatorChanges = evaluationContext.getAggregatorMutator();
 
@@ -80,7 +80,7 @@ class ParDoEvaluator<InputT, OutputT> implements TransformEvaluator<InputT> {
             sideInputReader,
             outputManager,
             mainOutputTag,
-            sideOutputTags,
+            additionalOutputTags,
             stepContext,
             aggregatorChanges,
             windowingStrategy);

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluatorFactory.java
index b8a13e2..0372295 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluatorFactory.java
@@ -80,7 +80,7 @@ final class ParDoEvaluatorFactory<InputT, OutputT> implements TransformEvaluator
                 doFn,
                 transform.getSideInputs(),
                 transform.getMainOutputTag(),
-                transform.getSideOutputTags().getAll());
+                transform.getAdditionalOutputTags().getAll());
     return evaluator;
   }
 
@@ -103,7 +103,7 @@ final class ParDoEvaluatorFactory<InputT, OutputT> implements TransformEvaluator
       DoFn<InputT, OutputT> doFn,
       List<PCollectionView<?>> sideInputs,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags)
+      List<TupleTag<?>> additionalOutputTags)
       throws Exception {
     String stepName = evaluationContext.getStepName(application);
     DirectStepContext stepContext =
@@ -119,7 +119,7 @@ final class ParDoEvaluatorFactory<InputT, OutputT> implements TransformEvaluator
             inputBundleKey,
             sideInputs,
             mainOutputTag,
-            sideOutputTags,
+            additionalOutputTags,
             stepContext,
             fnManager.<InputT, OutputT>get(),
             fnManager),
@@ -131,7 +131,7 @@ final class ParDoEvaluatorFactory<InputT, OutputT> implements TransformEvaluator
       StructuralKey<?> key,
       List<PCollectionView<?>> sideInputs,
       TupleTag<OutputT> mainOutputTag,
-      List<TupleTag<?>> sideOutputTags,
+      List<TupleTag<?>> additionalOutputTags,
       DirectStepContext stepContext,
       DoFn<InputT, OutputT> fn,
       DoFnLifecycleManager fnManager)
@@ -147,7 +147,7 @@ final class ParDoEvaluatorFactory<InputT, OutputT> implements TransformEvaluator
           key,
           sideInputs,
           mainOutputTag,
-          sideOutputTags,
+          additionalOutputTags,
           pcollections(application.getOutputs()));
     } catch (Exception e) {
       try {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
index 00c0d6a..366777b 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
@@ -192,7 +192,7 @@ class ParDoMultiOverrideFactory<InputT, OutputT>
           PCollectionTuple.ofPrimitiveOutputsInternal(
               input.getPipeline(),
               TupleTagList.of(underlyingParDo.getMainOutputTag())
-                  .and(underlyingParDo.getSideOutputTags().getAll()),
+                  .and(underlyingParDo.getAdditionalOutputTags().getAll()),
               input.getWindowingStrategy(),
               input.isBounded());
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SplittableProcessElementsEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SplittableProcessElementsEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SplittableProcessElementsEvaluatorFactory.java
index 07affd8..64cef35 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SplittableProcessElementsEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SplittableProcessElementsEvaluatorFactory.java
@@ -105,7 +105,7 @@ class SplittableProcessElementsEvaluatorFactory<
                 inputBundle.getKey(),
                 transform.getSideInputs(),
                 transform.getMainOutputTag(),
-                transform.getSideOutputTags().getAll(),
+                transform.getAdditionalOutputTags().getAll(),
                 stepContext,
                 processFn,
                 fnManager);
@@ -146,9 +146,9 @@ class SplittableProcessElementsEvaluatorFactory<
               }
 
               @Override
-              public <SideOutputT> void sideOutputWindowedValue(
-                  TupleTag<SideOutputT> tag,
-                  SideOutputT output,
+              public <AdditionalOutputT> void outputWindowedValue(
+                  TupleTag<AdditionalOutputT> tag,
+                  AdditionalOutputT output,
                   Instant timestamp,
                   Collection<? extends BoundedWindow> windows,
                   PaneInfo pane) {

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactory.java
index f8fe3d6..be77ea1 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactory.java
@@ -120,7 +120,7 @@ final class StatefulParDoEvaluatorFactory<K, InputT, OutputT> implements Transfo
             doFn,
             application.getTransform().getUnderlyingParDo().getSideInputs(),
             application.getTransform().getUnderlyingParDo().getMainOutputTag(),
-            application.getTransform().getUnderlyingParDo().getSideOutputTags().getAll());
+            application.getTransform().getUnderlyingParDo().getAdditionalOutputTags().getAll());
 
     return new StatefulParDoEvaluator<>(delegateEvaluator);
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
index 2a94d48..65a1248 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java
@@ -70,7 +70,7 @@ public class ParDoEvaluatorTest {
   @Mock private EvaluationContext evaluationContext;
   private PCollection<Integer> inputPc;
   private TupleTag<Integer> mainOutputTag;
-  private List<TupleTag<?>> sideOutputTags;
+  private List<TupleTag<?>> additionalOutputTags;
   private BundleFactory bundleFactory;
 
   @Rule
@@ -81,7 +81,7 @@ public class ParDoEvaluatorTest {
     MockitoAnnotations.initMocks(this);
     inputPc = p.apply(Create.of(1, 2, 3));
     mainOutputTag = new TupleTag<Integer>() {};
-    sideOutputTags = TupleTagList.empty().getAll();
+    additionalOutputTags = TupleTagList.empty().getAll();
 
     bundleFactory = ImmutableListBundleFactory.create();
   }
@@ -168,7 +168,7 @@ public class ParDoEvaluatorTest {
         null /* key */,
         ImmutableList.<PCollectionView<?>>of(singletonView),
         mainOutputTag,
-        sideOutputTags,
+        additionalOutputTags,
         ImmutableMap.<TupleTag<?>, PCollection<?>>of(mainOutputTag, output));
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
index af157f0..fbd7620 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
@@ -406,7 +406,7 @@ class FlinkStreamingTransformTranslators {
           DoFn<InputT, OutputT> doFn,
           List<PCollectionView<?>> sideInputs,
           TupleTag<OutputT> mainOutputTag,
-          List<TupleTag<?>> sideOutputTags,
+          List<TupleTag<?>> additionalOutputTags,
           FlinkStreamingTranslationContext context,
           WindowingStrategy<?, ?> windowingStrategy,
           Map<TupleTag<?>, Integer> tagsToLabels,
@@ -422,7 +422,7 @@ class FlinkStreamingTransformTranslators {
         List<PCollectionView<?>> sideInputs,
         Map<TupleTag<?>, PValue> outputs,
         TupleTag<OutputT> mainOutputTag,
-        List<TupleTag<?>> sideOutputTags,
+        List<TupleTag<?>> additionalOutputTags,
         FlinkStreamingTranslationContext context,
         DoFnOperatorFactory<InputT, OutputT> doFnOperatorFactory) {
 
@@ -460,7 +460,7 @@ class FlinkStreamingTransformTranslators {
                 doFn,
                 sideInputs,
                 mainOutputTag,
-                sideOutputTags,
+                additionalOutputTags,
                 context,
                 windowingStrategy,
                 tagsToLabels,
@@ -485,7 +485,7 @@ class FlinkStreamingTransformTranslators {
                 doFn,
                 sideInputs,
                 mainOutputTag,
-                sideOutputTags,
+                additionalOutputTags,
                 context,
                 windowingStrategy,
                 tagsToLabels,
@@ -605,7 +605,7 @@ class FlinkStreamingTransformTranslators {
           transform.getSideInputs(),
           context.getOutputs(transform),
           transform.getMainOutputTag(),
-          transform.getSideOutputTags().getAll(),
+          transform.getAdditionalOutputTags().getAll(),
           context,
           new ParDoTranslationHelper.DoFnOperatorFactory<InputT, OutputT>() {
             @Override
@@ -613,7 +613,7 @@ class FlinkStreamingTransformTranslators {
                 DoFn<InputT, OutputT> doFn,
                 List<PCollectionView<?>> sideInputs,
                 TupleTag<OutputT> mainOutputTag,
-                List<TupleTag<?>> sideOutputTags,
+                List<TupleTag<?>> additionalOutputTags,
                 FlinkStreamingTranslationContext context,
                 WindowingStrategy<?, ?> windowingStrategy,
                 Map<TupleTag<?>, Integer> tagsToLabels,
@@ -624,7 +624,7 @@ class FlinkStreamingTransformTranslators {
                   doFn,
                   inputCoder,
                   mainOutputTag,
-                  sideOutputTags,
+                  additionalOutputTags,
                   new DoFnOperator.MultiOutputOutputManagerFactory(tagsToLabels),
                   windowingStrategy,
                   transformedSideInputs,
@@ -654,7 +654,7 @@ class FlinkStreamingTransformTranslators {
           transform.getSideInputs(),
           context.getOutputs(transform),
           transform.getMainOutputTag(),
-          transform.getSideOutputTags().getAll(),
+          transform.getAdditionalOutputTags().getAll(),
           context,
           new ParDoTranslationHelper.DoFnOperatorFactory<
               KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>, OutputT>() {
@@ -668,7 +668,7 @@ class FlinkStreamingTransformTranslators {
                         OutputT> doFn,
                     List<PCollectionView<?>> sideInputs,
                     TupleTag<OutputT> mainOutputTag,
-                    List<TupleTag<?>> sideOutputTags,
+                    List<TupleTag<?>> additionalOutputTags,
                     FlinkStreamingTranslationContext context,
                     WindowingStrategy<?, ?> windowingStrategy,
                     Map<TupleTag<?>, Integer> tagsToLabels,
@@ -683,7 +683,7 @@ class FlinkStreamingTransformTranslators {
                   doFn,
                   inputCoder,
                   mainOutputTag,
-                  sideOutputTags,
+                  additionalOutputTags,
                   new DoFnOperator.MultiOutputOutputManagerFactory(tagsToLabels),
                   windowingStrategy,
                   transformedSideInputs,

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
index 9687478..51582af 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java
@@ -87,7 +87,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
     if (outputMap == null) {
       outputManager = new FlinkDoFnFunction.DoFnOutputManager(out);
     } else {
-      // it has some sideOutputs
+      // it has some additional outputs
       outputManager =
           new FlinkDoFnFunction.MultiDoFnOutputManager((Collector) out, outputMap);
     }
@@ -97,7 +97,7 @@ public class FlinkDoFnFunction<InputT, OutputT>
         new FlinkSideInputReader(sideInputs, runtimeContext),
         outputManager,
         mainOutputTag,
-        // see SimpleDoFnRunner, just use it to limit number of side outputs
+        // see SimpleDoFnRunner; this is only used to limit the number of additional outputs
         Collections.<TupleTag<?>>emptyList(),
         new FlinkNoOpStepContext(),
         new FlinkAggregatorFactory(runtimeContext),

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoOpStepContext.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoOpStepContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoOpStepContext.java
index d901d8e..847a00a 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoOpStepContext.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoOpStepContext.java
@@ -47,7 +47,7 @@ public class FlinkNoOpStepContext implements StepContext {
   }
 
   @Override
-  public void noteSideOutput(TupleTag<?> tag, WindowedValue<?> output) {
+  public void noteOutput(TupleTag<?> tag, WindowedValue<?> output) {
 
   }
 

http://git-wip-us.apache.org/repos/asf/beam/blob/113471d6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkStatefulDoFnFunction.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkStatefulDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkStatefulDoFnFunction.java
index 0d8399e..c8193d2 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkStatefulDoFnFunction.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkStatefulDoFnFunction.java
@@ -88,7 +88,7 @@ public class FlinkStatefulDoFnFunction<K, V, OutputT>
     if (outputMap == null) {
       outputManager = new FlinkDoFnFunction.DoFnOutputManager(out);
     } else {
-      // it has some sideOutputs
+      // it has some additional outputs
       outputManager =
           new FlinkDoFnFunction.MultiDoFnOutputManager((Collector) out, outputMap);
     }
@@ -114,7 +114,7 @@ public class FlinkStatefulDoFnFunction<K, V, OutputT>
         new FlinkSideInputReader(sideInputs, runtimeContext),
         outputManager,
         mainOutputTag,
-        // see SimpleDoFnRunner, just use it to limit number of side outputs
+        // see SimpleDoFnRunner; this is only used to limit the number of additional outputs
         Collections.<TupleTag<?>>emptyList(),
         new FlinkNoOpStepContext() {
           @Override


[33/50] [abbrv] beam git commit: Only compile HIFIO ITs when compiling with java 8.

Posted by ke...@apache.org.
Only compile HIFIO ITs when compiling with java 8.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/8330e158
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/8330e158
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/8330e158

Branch: refs/heads/jstorm-runner
Commit: 8330e15820cb1e2db64f4fd31f50450e782668d4
Parents: 4ff244d
Author: Stephen Sisk <si...@google.com>
Authored: Fri Apr 14 17:27:22 2017 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Mon Apr 17 17:40:50 2017 -0700

----------------------------------------------------------------------
 sdks/java/io/hadoop/pom.xml | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/8330e158/sdks/java/io/hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/sdks/java/io/hadoop/pom.xml b/sdks/java/io/hadoop/pom.xml
index 1982c25..14a19bb 100644
--- a/sdks/java/io/hadoop/pom.xml
+++ b/sdks/java/io/hadoop/pom.xml
@@ -29,10 +29,20 @@
   <description>Parent for Beam SDK Hadoop IO which reads data from any source which implements Hadoop Input Format.</description>
 
   <modules>
-    <module>jdk1.8-tests</module>
     <module>input-format</module>
   </modules>
 
+  <profiles>
+    <profile>
+      <activation>
+        <jdk>[1.8,)</jdk>
+      </activation>
+      <modules>
+        <module>jdk1.8-tests</module>
+      </modules>
+    </profile>
+  </profiles>
+
   <dependencies>
     <dependency>
       <groupId>org.apache.beam</groupId>

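A note on the activation above: Maven enables the profile whenever the build runs on a JDK matching the [1.8,) range, so the jdk1.8-tests module is compiled on Java 8 and newer and skipped entirely on Java 7. Which profiles are active can be confirmed with the standard maven-help-plugin, e.g. (invocation shown for illustration):

    mvn help:active-profiles -pl sdks/java/io/hadoop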

[40/50] [abbrv] beam git commit: This closes #2565

Posted by ke...@apache.org.
This closes #2565


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/c52ce7c4
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/c52ce7c4
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/c52ce7c4

Branch: refs/heads/jstorm-runner
Commit: c52ce7c4bd952d943bccb8acff53b36b40c35428
Parents: 36e4355 7c858a8
Author: Dan Halperin <dh...@google.com>
Authored: Mon Apr 17 21:02:34 2017 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Mon Apr 17 21:02:34 2017 -0700

----------------------------------------------------------------------
 .../core/src/main/java/org/apache/beam/sdk/transforms/Combine.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[47/50] [abbrv] beam git commit: [BEAM-1993] Remove special unbounded Flink source/sink

Posted by ke...@apache.org.
[BEAM-1993] Remove special unbounded Flink source/sink


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/d8213fa6
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/d8213fa6
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/d8213fa6

Branch: refs/heads/jstorm-runner
Commit: d8213fa6b274cd6acbf4da6deffd21ca23fd7f42
Parents: fac4f3e
Author: Ismaël Mejía <ie...@apache.org>
Authored: Tue Apr 18 16:03:11 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 16:15:09 2017 +0200

----------------------------------------------------------------------
 .../examples/streaming/KafkaIOExamples.java     | 338 -------------------
 .../KafkaWindowedWordCountExample.java          | 164 ---------
 .../FlinkStreamingTransformTranslators.java     |  87 +----
 .../flink/translation/types/FlinkCoder.java     |  63 ----
 .../streaming/io/UnboundedFlinkSink.java        | 200 -----------
 .../streaming/io/UnboundedFlinkSource.java      | 120 -------
 6 files changed, 12 insertions(+), 960 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
deleted file mode 100644
index 616e276..0000000
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.flink.examples.streaming;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Properties;
-import org.apache.beam.runners.flink.FlinkPipelineOptions;
-import org.apache.beam.runners.flink.FlinkRunner;
-import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSink;
-import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSource;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.AvroCoder;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.io.Write;
-import org.apache.beam.sdk.options.Default;
-import org.apache.beam.sdk.options.Description;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer08;
-import org.apache.flink.streaming.util.serialization.DeserializationSchema;
-import org.apache.flink.streaming.util.serialization.SerializationSchema;
-import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
-
-/**
- * Recipes/Examples that demonstrate how to read/write data from/to Kafka.
- */
-public class KafkaIOExamples {
-
-
-  private static final String KAFKA_TOPIC = "input";  // Default kafka topic to read from
-  private static final String KAFKA_AVRO_TOPIC = "output";  // Default kafka topic to read from
-  private static final String KAFKA_BROKER = "localhost:9092";  // Default kafka broker to contact
-  private static final String GROUP_ID = "myGroup";  // Default groupId
-  private static final String ZOOKEEPER = "localhost:2181";  // Default zookeeper to connect (Kafka)
-
-  /**
-   * Read/Write String data to Kafka.
-   */
-  public static class KafkaString {
-
-    /**
-     * Read String data from Kafka.
-     */
-    public static class ReadStringFromKafka {
-
-      public static void main(String[] args) {
-
-        Pipeline p = initializePipeline(args);
-        KafkaOptions options = getOptions(p);
-
-        FlinkKafkaConsumer08<String> kafkaConsumer =
-            new FlinkKafkaConsumer08<>(options.getKafkaTopic(),
-                new SimpleStringSchema(), getKafkaProps(options));
-
-        p
-            .apply(Read.from(UnboundedFlinkSource.of(kafkaConsumer))).setCoder(StringUtf8Coder.of())
-            .apply(ParDo.of(new PrintFn<>()));
-
-        p.run();
-
-      }
-
-    }
-
-    /**
-     * Write String data to Kafka.
-     */
-    public static class WriteStringToKafka {
-
-      public static void main(String[] args) {
-
-        Pipeline p = initializePipeline(args);
-        KafkaOptions options = getOptions(p);
-
-        PCollection<String> words =
-            p.apply(Create.of("These", "are", "some", "words"));
-
-        FlinkKafkaProducer08<String> kafkaSink =
-            new FlinkKafkaProducer08<>(options.getKafkaTopic(),
-                new SimpleStringSchema(), getKafkaProps(options));
-
-        words.apply(Write.to(UnboundedFlinkSink.of(kafkaSink)));
-
-        p.run();
-      }
-
-    }
-  }
-
-  /**
-   * Read/Write Avro data to Kafka.
-   */
-  public static class KafkaAvro {
-
-    /**
-     * Read Avro data from Kafka.
-     */
-    public static class ReadAvroFromKafka {
-
-      public static void main(String[] args) {
-
-        Pipeline p = initializePipeline(args);
-        KafkaOptions options = getOptions(p);
-
-        FlinkKafkaConsumer08<MyType> kafkaConsumer =
-            new FlinkKafkaConsumer08<>(options.getKafkaAvroTopic(),
-                new AvroSerializationDeserializationSchema<>(MyType.class), getKafkaProps(options));
-
-        p
-            .apply(Read.from(UnboundedFlinkSource.of(kafkaConsumer)))
-                .setCoder(AvroCoder.of(MyType.class))
-            .apply(ParDo.of(new PrintFn<>()));
-
-        p.run();
-
-      }
-
-    }
-
-    /**
-     * Write Avro data to Kafka.
-     */
-    public static class WriteAvroToKafka {
-
-      public static void main(String[] args) {
-
-        Pipeline p = initializePipeline(args);
-        KafkaOptions options = getOptions(p);
-
-        PCollection<MyType> words =
-            p.apply(Create.of(
-                new MyType("word", 1L),
-                new MyType("another", 2L),
-                new MyType("yet another", 3L)));
-
-        FlinkKafkaProducer08<MyType> kafkaSink =
-            new FlinkKafkaProducer08<>(options.getKafkaAvroTopic(),
-                new AvroSerializationDeserializationSchema<>(MyType.class), getKafkaProps(options));
-
-        words.apply(Write.to(UnboundedFlinkSink.of(kafkaSink)));
-
-        p.run();
-
-      }
-    }
-
-    /**
-     * Serialization/deserialization schema for Avro types.
-     * @param <T> the type being encoded
-     */
-    static class AvroSerializationDeserializationSchema<T>
-        implements SerializationSchema<T>, DeserializationSchema<T> {
-
-      private final Class<T> avroType;
-
-      private final AvroCoder<T> coder;
-      private transient ByteArrayOutputStream out;
-
-      AvroSerializationDeserializationSchema(Class<T> clazz) {
-        this.avroType = clazz;
-        this.coder = AvroCoder.of(clazz);
-        this.out = new ByteArrayOutputStream();
-      }
-
-      @Override
-      public byte[] serialize(T element) {
-        if (out == null) {
-          out = new ByteArrayOutputStream();
-        }
-        try {
-          out.reset();
-          coder.encode(element, out, Coder.Context.NESTED);
-        } catch (IOException e) {
-          throw new RuntimeException("Avro encoding failed.", e);
-        }
-        return out.toByteArray();
-      }
-
-      @Override
-      public T deserialize(byte[] message) throws IOException {
-        return coder.decode(new ByteArrayInputStream(message), Coder.Context.NESTED);
-      }
-
-      @Override
-      public boolean isEndOfStream(T nextElement) {
-        return false;
-      }
-
-      @Override
-      public TypeInformation<T> getProducedType() {
-        return TypeExtractor.getForClass(avroType);
-      }
-    }
-
-    /**
-     * Custom type for Avro serialization.
-     */
-    static class MyType implements Serializable {
-
-      public MyType() {}
-
-      MyType(String word, long count) {
-        this.word = word;
-        this.count = count;
-      }
-
-      String word;
-      long count;
-
-      @Override
-      public String toString() {
-        return "MyType{"
-            + "word='" + word + '\''
-            + ", count=" + count
-            + '}';
-      }
-    }
-  }
-
-  // -------------- Utilities --------------
-
-  /**
-   * Custom options for the Pipeline.
-   */
-  public interface KafkaOptions extends FlinkPipelineOptions {
-    @Description("The Kafka topic to read from")
-    @Default.String(KAFKA_TOPIC)
-    String getKafkaTopic();
-
-    void setKafkaTopic(String value);
-
-    void setKafkaAvroTopic(String value);
-
-    @Description("The Kafka topic to read from")
-    @Default.String(KAFKA_AVRO_TOPIC)
-    String getKafkaAvroTopic();
-
-    @Description("The Kafka Broker to read from")
-    @Default.String(KAFKA_BROKER)
-    String getBroker();
-
-    void setBroker(String value);
-
-    @Description("The Zookeeper server to connect to")
-    @Default.String(ZOOKEEPER)
-    String getZookeeper();
-
-    void setZookeeper(String value);
-
-    @Description("The groupId")
-    @Default.String(GROUP_ID)
-    String getGroup();
-
-    void setGroup(String value);
-  }
-
-  /**
-   * Initializes some options for the Flink runner.
-   * @param args The command line args
-   * @return the pipeline
-   */
-  private static Pipeline initializePipeline(String[] args) {
-    KafkaOptions options =
-        PipelineOptionsFactory.fromArgs(args).as(KafkaOptions.class);
-
-    options.setStreaming(true);
-    options.setRunner(FlinkRunner.class);
-
-    options.setCheckpointingInterval(1000L);
-    options.setNumberOfExecutionRetries(5);
-    options.setExecutionRetryDelay(3000L);
-
-    return Pipeline.create(options);
-  }
-
-  /**
-   * Gets KafkaOptions from the Pipeline.
-   * @param p the pipeline
-   * @return KafkaOptions
-   */
-  private static KafkaOptions getOptions(Pipeline p) {
-    return p.getOptions().as(KafkaOptions.class);
-  }
-
-  /**
-   * Helper method to set the Kafka props from the pipeline options.
-   * @param options KafkaOptions
-   * @return Kafka props
-   */
-  private static Properties getKafkaProps(KafkaOptions options) {
-
-    Properties props = new Properties();
-    props.setProperty("zookeeper.connect", options.getZookeeper());
-    props.setProperty("bootstrap.servers", options.getBroker());
-    props.setProperty("group.id", options.getGroup());
-
-    return props;
-  }
-
-  /**
-   * Print contents to stdout.
-   * @param <T> type of the input
-   */
-  private static class PrintFn<T> extends DoFn<T, T> {
-
-    @ProcessElement
-    public void processElement(ProcessContext c) throws Exception {
-      System.out.println(c.element().toString());
-    }
-  }
-
-}

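With UnboundedFlinkSource/UnboundedFlinkSink removed, Kafka access in a Beam pipeline goes through the runner-independent KafkaIO connector (sdks/java/io/kafka) on any runner, Flink included. A rough sketch of the replacement read, assuming the beam-sdks-java-io-kafka dependency and the 2.0-era KafkaIO method names, with p a Pipeline as in the deleted examples; none of this is part of the commit itself:

    import org.apache.beam.sdk.io.kafka.KafkaIO;
    import org.apache.kafka.common.serialization.StringDeserializer;

    PCollection<KV<String, String>> records = p.apply(
        KafkaIO.<String, String>read()
            .withBootstrapServers("localhost:9092")
            .withTopic("input")
            .withKeyDeserializer(StringDeserializer.class)
            .withValueDeserializer(StringDeserializer.class)
            .withoutMetadata());  // drop Kafka metadata, keep KV<key, value>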
http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
----------------------------------------------------------------------
diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
deleted file mode 100644
index ee0e874..0000000
--- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.flink.examples.streaming;
-
-import java.util.Properties;
-import org.apache.beam.runners.flink.FlinkRunner;
-import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSource;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.options.Default;
-import org.apache.beam.sdk.options.Description;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.transforms.Aggregator;
-import org.apache.beam.sdk.transforms.Count;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.Sum;
-import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
-import org.apache.beam.sdk.transforms.windowing.FixedWindows;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
-import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
-import org.joda.time.Duration;
-
-/**
- * Wordcount example using Kafka topic.
- */
-public class KafkaWindowedWordCountExample {
-
-  static final String KAFKA_TOPIC = "test";  // Default kafka topic to read from
-  static final String KAFKA_BROKER = "localhost:9092";  // Default kafka broker to contact
-  static final String GROUP_ID = "myGroup";  // Default groupId
-  static final String ZOOKEEPER = "localhost:2181";  // Default zookeeper to connect to for Kafka
-
-  /**
-   * Function to extract words.
-   */
-  public static class ExtractWordsFn extends DoFn<String, String> {
-    private final Aggregator<Long, Long> emptyLines =
-        createAggregator("emptyLines", Sum.ofLongs());
-
-    @ProcessElement
-    public void processElement(ProcessContext c) {
-      if (c.element().trim().isEmpty()) {
-        emptyLines.addValue(1L);
-      }
-
-      // Split the line into words.
-      String[] words = c.element().split("[^a-zA-Z']+");
-
-      // Output each word encountered into the output PCollection.
-      for (String word : words) {
-        if (!word.isEmpty()) {
-          c.output(word);
-        }
-      }
-    }
-  }
-
-  /**
-   * Function to format KV as String.
-   */
-  public static class FormatAsStringFn extends DoFn<KV<String, Long>, String> {
-    @ProcessElement
-    public void processElement(ProcessContext c) {
-      String row = c.element().getKey() + " - " + c.element().getValue() + " @ "
-          + c.timestamp().toString();
-      System.out.println(row);
-      c.output(row);
-    }
-  }
-
-  /**
-   * Pipeline options.
-   */
-  public interface KafkaStreamingWordCountOptions
-      extends WindowedWordCount.StreamingWordCountOptions {
-    @Description("The Kafka topic to read from")
-    @Default.String(KAFKA_TOPIC)
-    String getKafkaTopic();
-
-    void setKafkaTopic(String value);
-
-    @Description("The Kafka Broker to read from")
-    @Default.String(KAFKA_BROKER)
-    String getBroker();
-
-    void setBroker(String value);
-
-    @Description("The Zookeeper server to connect to")
-    @Default.String(ZOOKEEPER)
-    String getZookeeper();
-
-    void setZookeeper(String value);
-
-    @Description("The groupId")
-    @Default.String(GROUP_ID)
-    String getGroup();
-
-    void setGroup(String value);
-
-  }
-
-  public static void main(String[] args) {
-    PipelineOptionsFactory.register(KafkaStreamingWordCountOptions.class);
-    KafkaStreamingWordCountOptions options = PipelineOptionsFactory.fromArgs(args)
-        .as(KafkaStreamingWordCountOptions.class);
-    options.setJobName("KafkaExample - WindowSize: " + options.getWindowSize() + " seconds");
-    options.setStreaming(true);
-    options.setCheckpointingInterval(1000L);
-    options.setNumberOfExecutionRetries(5);
-    options.setExecutionRetryDelay(3000L);
-    options.setRunner(FlinkRunner.class);
-
-    System.out.println(options.getKafkaTopic() + " " + options.getZookeeper() + " "
-        + options.getBroker() + " " + options.getGroup());
-    Pipeline pipeline = Pipeline.create(options);
-
-    Properties p = new Properties();
-    p.setProperty("zookeeper.connect", options.getZookeeper());
-    p.setProperty("bootstrap.servers", options.getBroker());
-    p.setProperty("group.id", options.getGroup());
-
-    // this is the Flink consumer that reads the input to
-    // the program from a kafka topic.
-    FlinkKafkaConsumer08<String> kafkaConsumer = new FlinkKafkaConsumer08<>(
-        options.getKafkaTopic(),
-        new SimpleStringSchema(), p);
-
-    PCollection<String> words = pipeline
-        .apply("StreamingWordCount", Read.from(UnboundedFlinkSource.of(kafkaConsumer)))
-        .apply(ParDo.of(new ExtractWordsFn()))
-        .apply(Window.<String>into(FixedWindows.of(
-            Duration.standardSeconds(options.getWindowSize())))
-            .triggering(AfterWatermark.pastEndOfWindow()).withAllowedLateness(Duration.ZERO)
-            .discardingFiredPanes());
-
-    PCollection<KV<String, Long>> wordCounts =
-        words.apply(Count.<String>perElement());
-
-    wordCounts.apply(ParDo.of(new FormatAsStringFn()))
-        .apply(TextIO.Write.to("./outputKafka.txt"));
-
-    pipeline.run();
-  }
-}
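
The deleted example read Kafka through the Flink-specific UnboundedFlinkSource wrapper removed in this commit. A minimal, runner-agnostic sketch of the same read using Beam's KafkaIO connector (assuming a Pipeline named pipeline, the beam-sdks-java-io-kafka dependency, and Kafka's standard deserializers; this snippet is illustrative and not part of the commit):

    import org.apache.beam.sdk.io.kafka.KafkaIO;
    import org.apache.beam.sdk.transforms.Values;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.kafka.common.serialization.LongDeserializer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    // Read <Long, String> records from the "test" topic on the local broker.
    PCollection<String> lines = pipeline
        .apply(KafkaIO.<Long, String>read()
            .withBootstrapServers("localhost:9092")
            .withTopic("test")
            .withKeyDeserializer(LongDeserializer.class)
            .withValueDeserializer(StringDeserializer.class)
            .withoutMetadata())               // drop Kafka metadata, keep KV<Long, String>
        .apply(Values.<String>create());      // keep only the message payloads

Unlike the wrapper above, this source runs on any Beam runner, not only the FlinkRunner.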

http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
index fbd7620..123d5e7 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingTransformTranslators.java
@@ -36,7 +36,6 @@ import org.apache.beam.runners.core.SplittableParDo;
 import org.apache.beam.runners.core.SystemReduceFn;
 import org.apache.beam.runners.flink.translation.functions.FlinkAssignWindows;
 import org.apache.beam.runners.flink.translation.types.CoderTypeInformation;
-import org.apache.beam.runners.flink.translation.types.FlinkCoder;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.KvToByteBufferKeySelector;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.SingletonKeyedWorkItem;
@@ -45,17 +44,13 @@ import org.apache.beam.runners.flink.translation.wrappers.streaming.SplittableDo
 import org.apache.beam.runners.flink.translation.wrappers.streaming.WindowDoFnOperator;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.WorkItemKeySelector;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.io.BoundedSourceWrapper;
-import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSink;
-import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSource;
 import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.io.Sink;
 import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.io.Write;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
@@ -69,7 +64,6 @@ import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
 import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
 import org.apache.beam.sdk.util.AppliedCombineFn;
@@ -94,12 +88,10 @@ import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.datastream.KeyedStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.datastream.SplitStream;
-import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
 import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
 import org.apache.flink.streaming.api.operators.TwoInputStreamOperator;
 import org.apache.flink.streaming.api.transformations.TwoInputTransformation;
 import org.apache.flink.util.Collector;
-import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -124,7 +116,6 @@ class FlinkStreamingTransformTranslators {
   static {
     TRANSLATORS.put(Read.Bounded.class, new BoundedReadSourceTranslator());
     TRANSLATORS.put(Read.Unbounded.class, new UnboundedReadSourceTranslator());
-    TRANSLATORS.put(Write.class, new WriteSinkStreamingTranslator());
     TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator());
 
     TRANSLATORS.put(ParDo.MultiOutput.class, new ParDoStreamingTranslator());
@@ -203,31 +194,6 @@ class FlinkStreamingTransformTranslators {
     }
   }
 
-  private static class WriteSinkStreamingTranslator<T>
-      extends FlinkStreamingPipelineTranslator.StreamTransformTranslator<Write<T>> {
-
-    @Override
-    public void translateNode(Write<T> transform, FlinkStreamingTranslationContext context) {
-      String name = transform.getName();
-      PValue input = context.getInput(transform);
-
-      Sink<T> sink = transform.getSink();
-      if (!(sink instanceof UnboundedFlinkSink)) {
-        throw new UnsupportedOperationException(
-            "At the time, only unbounded Flink sinks are supported.");
-      }
-
-      DataStream<WindowedValue<T>> inputDataSet = context.getInputDataStream(input);
-
-      inputDataSet.flatMap(new FlatMapFunction<WindowedValue<T>, Object>() {
-        @Override
-        public void flatMap(WindowedValue<T> value, Collector<Object> out) throws Exception {
-          out.collect(value.getValue());
-        }
-      }).addSink(((UnboundedFlinkSink<Object>) sink).getFlinkSource()).name(name);
-    }
-  }
-
   private static class UnboundedReadSourceTranslator<T>
       extends FlinkStreamingPipelineTranslator.StreamTransformTranslator<Read.Unbounded<T>> {
 
@@ -241,47 +207,18 @@ class FlinkStreamingTransformTranslators {
           context.getTypeInfo(context.getOutput(transform));
 
       DataStream<WindowedValue<T>> source;
-      if (transform.getSource().getClass().equals(UnboundedFlinkSource.class)) {
-        @SuppressWarnings("unchecked")
-        UnboundedFlinkSource<T> flinkSourceFunction =
-            (UnboundedFlinkSource<T>) transform.getSource();
-
-        final AssignerWithPeriodicWatermarks<T> flinkAssigner =
-            flinkSourceFunction.getFlinkTimestampAssigner();
-
-        DataStream<T> flinkSource = context.getExecutionEnvironment()
-            .addSource(flinkSourceFunction.getFlinkSource());
-
-        flinkSourceFunction.setCoder(
-            new FlinkCoder<T>(flinkSource.getType(),
-              context.getExecutionEnvironment().getConfig()));
-
-        source = flinkSource
-            .assignTimestampsAndWatermarks(flinkAssigner)
-            .flatMap(new FlatMapFunction<T, WindowedValue<T>>() {
-              @Override
-              public void flatMap(T s, Collector<WindowedValue<T>> collector) throws Exception {
-                collector.collect(
-                    WindowedValue.of(
-                        s,
-                        new Instant(flinkAssigner.extractTimestamp(s, -1)),
-                        GlobalWindow.INSTANCE,
-                        PaneInfo.NO_FIRING));
-              }}).returns(outputTypeInfo);
-      } else {
-        try {
-          UnboundedSourceWrapper<T, ?> sourceWrapper =
-              new UnboundedSourceWrapper<>(
-                  context.getPipelineOptions(),
-                  transform.getSource(),
-                  context.getExecutionEnvironment().getParallelism());
-          source = context
-              .getExecutionEnvironment()
-              .addSource(sourceWrapper).name(transform.getName()).returns(outputTypeInfo);
-        } catch (Exception e) {
-          throw new RuntimeException(
-              "Error while translating UnboundedSource: " + transform.getSource(), e);
-        }
+      try {
+        UnboundedSourceWrapper<T, ?> sourceWrapper =
+            new UnboundedSourceWrapper<>(
+                context.getPipelineOptions(),
+                transform.getSource(),
+                context.getExecutionEnvironment().getParallelism());
+        source = context
+            .getExecutionEnvironment()
+            .addSource(sourceWrapper).name(transform.getName()).returns(outputTypeInfo);
+      } catch (Exception e) {
+        throw new RuntimeException(
+            "Error while translating UnboundedSource: " + transform.getSource(), e);
       }
 
       context.setOutputDataStream(output, source);

http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java
deleted file mode 100644
index 8b90c73..0000000
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.flink.translation.types;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Collections;
-import java.util.List;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.StandardCoder;
-import org.apache.flink.api.common.ExecutionConfig;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.core.memory.DataInputViewStreamWrapper;
-import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
-
-/**
- * A Coder that uses Flink's serialization system.
- * @param <T> The type of the value to be encoded
- */
-public class FlinkCoder<T> extends StandardCoder<T> {
-
-  private final TypeSerializer<T> typeSerializer;
-
-  public FlinkCoder(TypeInformation<T> typeInformation, ExecutionConfig executionConfig) {
-    this.typeSerializer = typeInformation.createSerializer(executionConfig);
-  }
-
-  @Override
-  public void encode(T value, OutputStream outStream, Context context) throws IOException {
-    typeSerializer.serialize(value, new DataOutputViewStreamWrapper(outStream));
-  }
-
-  @Override
-  public T decode(InputStream inStream, Context context) throws IOException {
-    return typeSerializer.deserialize(new DataInputViewStreamWrapper(inStream));
-  }
-
-  @Override
-  public List<? extends Coder<?>> getCoderArguments() {
-    return Collections.emptyList();
-  }
-
-  @Override
-  public void verifyDeterministic() throws NonDeterministicException {
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java
deleted file mode 100644
index af36b80..0000000
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.flink.translation.wrappers.streaming.io;
-
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Collection;
-import java.util.List;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.io.Sink;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
-import org.apache.beam.sdk.util.CloudObject;
-import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
-import org.apache.beam.sdk.values.TypeDescriptor;
-import org.apache.flink.streaming.api.functions.sink.SinkFunction;
-
-/**
- * A wrapper translating Flink sinks implementing the {@link SinkFunction} interface, into
- * unbounded Beam sinks (see {@link UnboundedSource}).
- * */
-public class UnboundedFlinkSink<T> extends Sink<T> {
-
-  /* The Flink sink function */
-  private final SinkFunction<T> flinkSink;
-
-  private UnboundedFlinkSink(SinkFunction<T> flinkSink) {
-    this.flinkSink = flinkSink;
-  }
-
-  public SinkFunction<T> getFlinkSource() {
-    return this.flinkSink;
-  }
-
-  @Override
-  public void validate(PipelineOptions options) {
-  }
-
-  @Override
-  public WriteOperation<T, ?> createWriteOperation(PipelineOptions options) {
-    return new WriteOperation<T, Object>() {
-      @Override
-      public void initialize(PipelineOptions options) throws Exception {
-
-      }
-
-      @Override
-      public void setWindowedWrites(boolean windowedWrites) {
-      }
-
-      @Override
-      public void finalize(Iterable<Object> writerResults, PipelineOptions options)
-          throws Exception {
-
-      }
-
-      @Override
-      public Coder<Object> getWriterResultCoder() {
-        return new Coder<Object>() {
-          @Override
-          public void encode(Object value, OutputStream outStream, Context context)
-              throws CoderException, IOException {
-
-          }
-
-          @Override
-          public Object decode(InputStream inStream, Context context)
-              throws CoderException, IOException {
-            return null;
-          }
-
-          @Override
-          public List<? extends Coder<?>> getCoderArguments() {
-            return null;
-          }
-
-          @Override
-          public CloudObject asCloudObject() {
-            return null;
-          }
-
-          @Override
-          public void verifyDeterministic() throws NonDeterministicException {
-
-          }
-
-          @Override
-          public boolean consistentWithEquals() {
-            return false;
-          }
-
-          @Override
-          public Object structuralValue(Object value) throws Exception {
-            return null;
-          }
-
-          @Override
-          public boolean isRegisterByteSizeObserverCheap(Object value, Context context) {
-            return false;
-          }
-
-          @Override
-          public void registerByteSizeObserver(Object value,
-                                               ElementByteSizeObserver observer,
-                                               Context context) throws Exception {
-
-          }
-
-          @Override
-          public String getEncodingId() {
-            return null;
-          }
-
-          @Override
-          public Collection<String> getAllowedEncodings() {
-            return null;
-          }
-
-          @Override
-          public TypeDescriptor<Object> getEncodedTypeDescriptor() {
-            return TypeDescriptor.of(Object.class);
-          }
-        };
-      }
-
-      @Override
-      public Writer<T, Object> createWriter(PipelineOptions options) throws Exception {
-        return new Writer<T, Object>() {
-          @Override
-          public void openWindowed(String uId,
-                                   BoundedWindow window,
-                                   PaneInfo paneInfo,
-                                   int shard,
-                                   int numShards) throws Exception {
-          }
-
-          @Override
-          public void openUnwindowed(String uId, int shard, int numShards) throws Exception {
-          }
-
-          @Override
-          public void cleanup() throws Exception {
-
-          }
-
-          @Override
-          public void write(T value) throws Exception {
-
-          }
-
-          @Override
-          public Object close() throws Exception {
-            return null;
-          }
-
-          @Override
-          public WriteOperation<T, Object> getWriteOperation() {
-            return null;
-          }
-
-        };
-      }
-
-      @Override
-      public Sink<T> getSink() {
-        return UnboundedFlinkSink.this;
-      }
-    };
-  }
-
-  /**
-   * Creates a Flink sink to write to using the Write API.
-   * @param flinkSink The Flink sink, e.g. FlinkKafkaProducer09
-   * @param <T> The input type of the sink
-   * @return A Beam sink wrapping a Flink sink
-   */
-  public static <T> Sink<T> of(SinkFunction<T> flinkSink) {
-    return new UnboundedFlinkSink<>(flinkSink);
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/d8213fa6/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java
deleted file mode 100644
index ac20c34..0000000
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.runners.flink.translation.wrappers.streaming.io;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import java.util.List;
-import javax.annotation.Nullable;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.io.UnboundedSource;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
-import org.apache.flink.streaming.api.functions.IngestionTimeExtractor;
-import org.apache.flink.streaming.api.functions.source.SourceFunction;
-
-/**
- * A wrapper translating Flink Sources implementing the {@link SourceFunction} interface, into
- * unbounded Beam sources (see {@link UnboundedSource}).
- * */
-public class UnboundedFlinkSource<T> extends UnboundedSource<T, UnboundedSource.CheckpointMark> {
-
-  private final SourceFunction<T> flinkSource;
-
-  /** Coder set during translation. */
-  private Coder<T> coder;
-
-  /** Timestamp / watermark assigner for source; defaults to ingestion time. */
-  private AssignerWithPeriodicWatermarks<T> flinkTimestampAssigner =
-      new IngestionTimeExtractor<T>();
-
-  public UnboundedFlinkSource(SourceFunction<T> source) {
-    flinkSource = checkNotNull(source);
-  }
-
-  public UnboundedFlinkSource(SourceFunction<T> source,
-                              AssignerWithPeriodicWatermarks<T> timestampAssigner) {
-    flinkSource = checkNotNull(source);
-    flinkTimestampAssigner = checkNotNull(timestampAssigner);
-  }
-
-  public SourceFunction<T> getFlinkSource() {
-    return this.flinkSource;
-  }
-
-  public AssignerWithPeriodicWatermarks<T> getFlinkTimestampAssigner() {
-    return flinkTimestampAssigner;
-  }
-
-  @Override
-  public List<? extends UnboundedSource<T, UnboundedSource.CheckpointMark>> generateInitialSplits(
-      int desiredNumSplits,
-      PipelineOptions options) throws Exception {
-    throw new RuntimeException("Flink Sources are supported only when "
-        + "running with the FlinkRunner.");
-  }
-
-  @Override
-  public UnboundedReader<T> createReader(PipelineOptions options,
-                                         @Nullable CheckpointMark checkpointMark) {
-    throw new RuntimeException("Flink Sources are supported only when "
-        + "running with the FlinkRunner.");
-  }
-
-  @Nullable
-  @Override
-  public Coder<UnboundedSource.CheckpointMark> getCheckpointMarkCoder() {
-    throw new RuntimeException("Flink Sources are supported only when "
-        + "running with the FlinkRunner.");
-  }
-
-
-  @Override
-  public void validate() {
-  }
-
-  @Override
-  public Coder<T> getDefaultOutputCoder() {
-    // The coder derived from the Flink source
-    return coder;
-  }
-
-  public void setCoder(Coder<T> coder) {
-    this.coder = coder;
-  }
-
-  public void setFlinkTimestampAssigner(AssignerWithPeriodicWatermarks<T> flinkTimestampAssigner) {
-    this.flinkTimestampAssigner = flinkTimestampAssigner;
-  }
-
-  /**
-   * Creates a new unbounded source from a Flink source.
-   * @param flinkSource The Flink source function
-   * @param <T> The type that the source function produces.
-   * @return The wrapped source function.
-   */
-  public static <T> UnboundedSource<T, UnboundedSource.CheckpointMark> of(
-      SourceFunction<T> flinkSource) {
-    return new UnboundedFlinkSource<>(flinkSource);
-  }
-
-  public static <T> UnboundedSource<T, UnboundedSource.CheckpointMark> of(
-          SourceFunction<T> flinkSource, AssignerWithPeriodicWatermarks<T> flinkTimestampAssigner) {
-    return new UnboundedFlinkSource<>(flinkSource, flinkTimestampAssigner);
-  }
-}


[19/50] [abbrv] beam git commit: [BEAM-1922] Close datasource in JdbcIO when possible

Posted by ke...@apache.org.
[BEAM-1922] Close datasource in JdbcIO when possible


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/dc846268
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/dc846268
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/dc846268

Branch: refs/heads/jstorm-runner
Commit: dc84626877a0e7183ed660df167a1d02d1589f90
Parents: 946778c
Author: mingmxu <mi...@ebay.com>
Authored: Mon Apr 10 11:19:02 2017 -0700
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Mon Apr 17 18:07:16 2017 +0200

----------------------------------------------------------------------
 .../org/apache/beam/sdk/io/jdbc/JdbcIO.java     | 40 +++++++++++---------
 .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java | 10 ++---
 2 files changed, 27 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/dc846268/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
index 05a30a4..b26a47d 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
@@ -21,7 +21,6 @@ import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkState;
 
 import com.google.auto.value.AutoValue;
-
 import java.io.Serializable;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
@@ -55,15 +54,13 @@ import org.apache.commons.dbcp2.BasicDataSource;
  * <p>JdbcIO source returns a bounded collection of {@code T} as a {@code PCollection<T>}. T is the
  * type returned by the provided {@link RowMapper}.
  *
- * <p>To configure the JDBC source, you have to provide a {@link DataSourceConfiguration} using
- * {@link DataSourceConfiguration#create(DataSource)} or
- * {@link DataSourceConfiguration#create(String, String)} with either a
- * {@link DataSource} (which must be {@link Serializable}) or the parameters needed to create it
- * (driver class name and url). Optionally, {@link DataSourceConfiguration#withUsername(String)} and
- * {@link DataSourceConfiguration#withPassword(String)} allows you to define DataSource username
- * and password.
- * For example:
+ * <p>To configure the JDBC source, you have to provide a {@link DataSourceConfiguration} using<br>
+ * 1. {@link DataSourceConfiguration#create(DataSource)} (which must be {@link Serializable});<br>
+ * 2. or {@link DataSourceConfiguration#create(String, String)} (driver class name and url).
+ * Optionally, {@link DataSourceConfiguration#withUsername(String)} and
+ * {@link DataSourceConfiguration#withPassword(String)} allow you to define username and password.
  *
+ * <p>For example:
  * <pre>{@code
  * pipeline.apply(JdbcIO.<KV<Integer, String>>read()
  *   .withDataSourceConfiguration(JdbcIO.DataSourceConfiguration.create(
@@ -245,11 +242,9 @@ public class JdbcIO {
       }
     }
 
-    Connection getConnection() throws Exception {
+    DataSource buildDatasource() throws Exception {
       if (getDataSource() != null) {
-        return (getUsername() != null)
-            ? getDataSource().getConnection(getUsername(), getPassword())
-            : getDataSource().getConnection();
+        return getDataSource();
       } else {
         BasicDataSource basicDataSource = new BasicDataSource();
         basicDataSource.setDriverClassName(getDriverClassName());
@@ -259,9 +254,10 @@ public class JdbcIO {
         if (getConnectionProperties() != null) {
           basicDataSource.setConnectionProperties(getConnectionProperties());
         }
-        return basicDataSource.getConnection();
+        return basicDataSource;
       }
     }
+
   }
 
   /**
@@ -368,6 +364,7 @@ public class JdbcIO {
     /** A {@link DoFn} executing the SQL query to read from the database. */
     static class ReadFn<T> extends DoFn<String, T> {
       private JdbcIO.Read<T> spec;
+      private DataSource dataSource;
       private Connection connection;
 
       private ReadFn(Read<T> spec) {
@@ -376,7 +373,8 @@ public class JdbcIO {
 
       @Setup
       public void setup() throws Exception {
-        connection = spec.getDataSourceConfiguration().getConnection();
+        dataSource = spec.getDataSourceConfiguration().buildDatasource();
+        connection = dataSource.getConnection();
       }
 
       @ProcessElement
@@ -396,8 +394,9 @@ public class JdbcIO {
 
       @Teardown
       public void teardown() throws Exception {
-        if (connection != null) {
-          connection.close();
+        connection.close();
+        if (dataSource instanceof AutoCloseable) {
+          ((AutoCloseable) dataSource).close();
         }
       }
     }
@@ -462,6 +461,7 @@ public class JdbcIO {
 
       private final Write<T> spec;
 
+      private DataSource dataSource;
       private Connection connection;
       private PreparedStatement preparedStatement;
       private int batchCount;
@@ -472,7 +472,8 @@ public class JdbcIO {
 
       @Setup
       public void setup() throws Exception {
-        connection = spec.getDataSourceConfiguration().getConnection();
+        dataSource = spec.getDataSourceConfiguration().buildDatasource();
+        connection = dataSource.getConnection();
         connection.setAutoCommit(false);
         preparedStatement = connection.prepareStatement(spec.getStatement());
       }
@@ -516,6 +517,9 @@ public class JdbcIO {
           if (connection != null) {
             connection.close();
           }
+          if (dataSource instanceof AutoCloseable) {
+            ((AutoCloseable) dataSource).close();
+          }
         }
       }
     }
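
The teardown hunks above close the Connection first and then the DataSource, but only when the DataSource can actually be closed: a user-supplied DataSource from create(DataSource) need not be closeable, whereas the internally built BasicDataSource is. A slightly more defensive sketch of the same pattern, also guarding against a @Setup that failed before the connection was opened (illustrative, not part of this commit):

    @Teardown
    public void teardown() throws Exception {
      try {
        if (connection != null) {
          connection.close();
        }
      } finally {
        // Close the DataSource even if closing the connection throws,
        // and only if it supports being closed at all.
        if (dataSource instanceof AutoCloseable) {
          ((AutoCloseable) dataSource).close();
        }
      }
    }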

http://git-wip-us.apache.org/repos/asf/beam/blob/dc846268/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
index 4e82338..984ce1a 100644
--- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
+++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
@@ -126,7 +126,7 @@ public class JdbcIOTest implements Serializable {
   @Test
   public void testDataSourceConfigurationDataSource() throws Exception {
     JdbcIO.DataSourceConfiguration config = JdbcIO.DataSourceConfiguration.create(dataSource);
-    try (Connection conn = config.getConnection()) {
+    try (Connection conn = config.buildDatasource().getConnection()) {
       assertTrue(conn.isValid(0));
     }
   }
@@ -136,7 +136,7 @@ public class JdbcIOTest implements Serializable {
     JdbcIO.DataSourceConfiguration config = JdbcIO.DataSourceConfiguration.create(
         "org.apache.derby.jdbc.ClientDriver",
         "jdbc:derby://localhost:" + port + "/target/beam");
-    try (Connection conn = config.getConnection()) {
+    try (Connection conn = config.buildDatasource().getConnection()) {
       assertTrue(conn.isValid(0));
     }
   }
@@ -148,7 +148,7 @@ public class JdbcIOTest implements Serializable {
         "jdbc:derby://localhost:" + port + "/target/beam")
         .withUsername("sa")
         .withPassword("sa");
-    try (Connection conn = config.getConnection()) {
+    try (Connection conn = config.buildDatasource().getConnection()) {
       assertTrue(conn.isValid(0));
     }
   }
@@ -160,7 +160,7 @@ public class JdbcIOTest implements Serializable {
         "jdbc:derby://localhost:" + port + "/target/beam")
         .withUsername("sa")
         .withPassword(null);
-    try (Connection conn = config.getConnection()) {
+    try (Connection conn = config.buildDatasource().getConnection()) {
       assertTrue(conn.isValid(0));
     }
   }
@@ -172,7 +172,7 @@ public class JdbcIOTest implements Serializable {
         "jdbc:derby://localhost:" + port + "/target/beam")
         .withUsername(null)
         .withPassword(null);
-    try (Connection conn = config.getConnection()) {
+    try (Connection conn = config.buildDatasource().getConnection()) {
       assertTrue(conn.isValid(0));
     }
   }


[42/50] [abbrv] beam git commit: [BEAM-59] This closes #2569

Posted by ke...@apache.org.
[BEAM-59] This closes #2569


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/e5568589
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/e5568589
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/e5568589

Branch: refs/heads/jstorm-runner
Commit: e5568589cab11399126f678ad3fbca4b1fb715e4
Parents: c52ce7c b43c92f
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Tue Apr 18 11:12:39 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Tue Apr 18 11:12:39 2017 +0200

----------------------------------------------------------------------
 .../flink/translation/utils/SerializedPipelineOptions.java    | 2 ++
 .../beam/runners/spark/translation/SparkRuntimeContext.java   | 2 ++
 .../main/java/org/apache/beam/sdk/runners/PipelineRunner.java | 7 +++----
 .../main/java/org/apache/beam/sdk/testing/TestPipeline.java   | 2 ++
 4 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------



[11/50] [abbrv] beam git commit: Update Signature of PTransformOverrideFactory

Posted by ke...@apache.org.
Update Signature of PTransformOverrideFactory

This gives replacement factories access to the entire AppliedPTransform
that is being replaced, not just the original PTransform.

This is required when side inputs are part of the input of the
transform application: PTransforms are not applied to their side
inputs, so the main input cannot be recovered from the transform alone.
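
Under the new contract a factory receives the whole AppliedPTransform and returns a PTransformReplacement pairing the recovered main input with the replacement transform, as the diffs below show. A hypothetical factory written against the new signature (IdentityOverride is illustrative; PTransformReplacement.of, PTransformReplacements.getSingletonMainInput, and ReplacementOutputs.singleton appear in the diffs below):

    class IdentityOverride<T>
        implements PTransformOverrideFactory<
            PCollection<T>, PCollection<T>, PTransform<PCollection<T>, PCollection<T>>> {

      @Override
      public PTransformReplacement<PCollection<T>, PCollection<T>> getReplacementTransform(
          AppliedPTransform<
                  PCollection<T>, PCollection<T>, PTransform<PCollection<T>, PCollection<T>>>
              transform) {
        // The main input is recovered from the full application, skipping any
        // additional (side) inputs, rather than being supplied separately.
        return PTransformReplacement.of(
            PTransformReplacements.getSingletonMainInput(transform),
            new PTransform<PCollection<T>, PCollection<T>>() {
              @Override
              public PCollection<T> expand(PCollection<T> input) {
                // Identity ParDo: emit every element unchanged.
                return input.apply(ParDo.of(new DoFn<T, T>() {
                  @ProcessElement
                  public void process(ProcessContext c) {
                    c.output(c.element());
                  }
                }));
              }
            });
      }

      @Override
      public Map<PValue, ReplacementOutput> mapOutputs(
          Map<TupleTag<?>, PValue> outputs, PCollection<T> newOutput) {
        return ReplacementOutputs.singleton(outputs, newOutput);
      }
    }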


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/f3b49605
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/f3b49605
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/f3b49605

Branch: refs/heads/jstorm-runner
Commit: f3b496053d2596ee1b2de55f6da055b478a0d6d3
Parents: 3c2b855
Author: Thomas Groh <tg...@google.com>
Authored: Wed Mar 29 15:23:21 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Fri Apr 14 16:52:03 2017 -0700

----------------------------------------------------------------------
 .../apache/beam/runners/apex/ApexRunner.java    |  32 +++--
 .../DeduplicatedFlattenFactory.java             |  63 +++++----
 .../EmptyFlattenAsCreateFactory.java            |  20 ++-
 .../core/construction/PTransformMatchers.java   |   2 -
 .../construction/PTransformReplacements.java    |  69 ++++++++++
 .../core/construction/PrimitiveCreate.java      |  13 +-
 .../SingleInputOutputOverrideFactory.java       |   9 +-
 .../UnsupportedOverrideFactory.java             |  14 +-
 .../DeduplicatedFlattenFactoryTest.java         |  18 +--
 .../EmptyFlattenAsCreateFactoryTest.java        |  36 ++++-
 .../PTransformReplacementsTest.java             | 131 +++++++++++++++++++
 .../SingleInputOutputOverrideFactoryTest.java   |  31 ++---
 .../UnsupportedOverrideFactoryTest.java         |  11 +-
 ...ectGBKIntoKeyedWorkItemsOverrideFactory.java |  16 ++-
 .../direct/DirectGroupByKeyOverrideFactory.java |  14 +-
 .../direct/ParDoMultiOverrideFactory.java       |  22 ++--
 .../direct/TestStreamEvaluatorFactory.java      |  14 +-
 .../runners/direct/ViewOverrideFactory.java     |  18 +--
 .../direct/WriteWithShardingFactory.java        |  16 +--
 .../DirectGroupByKeyOverrideFactoryTest.java    |  12 +-
 .../direct/ParDoMultiOverrideFactoryTest.java   |  45 -------
 .../direct/TestStreamEvaluatorFactoryTest.java  |  12 --
 .../runners/direct/ViewOverrideFactoryTest.java |  42 ++++--
 .../direct/WriteWithShardingFactoryTest.java    |  23 ++--
 .../flink/FlinkStreamingPipelineTranslator.java |  56 ++++----
 .../dataflow/BatchStatefulParDoOverrides.java   |  42 +++---
 .../runners/dataflow/BatchViewOverrides.java    |  17 ++-
 .../beam/runners/dataflow/DataflowRunner.java   |  92 ++++++-------
 .../dataflow/PrimitiveParDoSingleFactory.java   |  15 ++-
 .../dataflow/ReshuffleOverrideFactory.java      |  12 +-
 .../dataflow/StreamingViewOverrides.java        |  14 +-
 .../PrimitiveParDoSingleFactoryTest.java        |  59 +++++++--
 .../beam/runners/spark/TestSparkRunner.java     |  14 +-
 .../main/java/org/apache/beam/sdk/Pipeline.java |  15 ++-
 .../sdk/runners/PTransformOverrideFactory.java  |  33 +++--
 .../beam/sdk/transforms/AppliedPTransform.java  |   5 +
 .../java/org/apache/beam/sdk/PipelineTest.java  |  33 ++---
 37 files changed, 675 insertions(+), 415 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexRunner.java
----------------------------------------------------------------------
diff --git a/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexRunner.java b/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexRunner.java
index 1c99f8d..1c845c6 100644
--- a/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexRunner.java
+++ b/runners/apex/src/main/java/org/apache/beam/runners/apex/ApexRunner.java
@@ -39,6 +39,7 @@ import org.apache.apex.api.Launcher.AppHandle;
 import org.apache.apex.api.Launcher.LaunchMode;
 import org.apache.beam.runners.apex.translation.ApexPipelineTranslator;
 import org.apache.beam.runners.core.construction.PTransformMatchers;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.PrimitiveCreate;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.sdk.Pipeline;
@@ -49,6 +50,7 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsValidator;
 import org.apache.beam.sdk.runners.PTransformOverride;
 import org.apache.beam.sdk.runners.PipelineRunner;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView;
 import org.apache.beam.sdk.transforms.Create;
@@ -258,9 +260,15 @@ public class ApexRunner extends PipelineRunner<ApexRunnerResult> {
             PCollection<InputT>, PCollectionView<OutputT>,
             Combine.GloballyAsSingletonView<InputT, OutputT>> {
       @Override
-      public PTransform<PCollection<InputT>, PCollectionView<OutputT>> getReplacementTransform(
-          GloballyAsSingletonView<InputT, OutputT> transform) {
-        return new StreamingCombineGloballyAsSingletonView<>(transform);
+      public PTransformReplacement<PCollection<InputT>, PCollectionView<OutputT>>
+          getReplacementTransform(
+              AppliedPTransform<
+                      PCollection<InputT>, PCollectionView<OutputT>,
+                      GloballyAsSingletonView<InputT, OutputT>>
+                  transform) {
+        return PTransformReplacement.of(
+            PTransformReplacements.getSingletonMainInput(transform),
+            new StreamingCombineGloballyAsSingletonView<>(transform.getTransform()));
       }
     }
   }
@@ -321,9 +329,11 @@ public class ApexRunner extends PipelineRunner<ApexRunnerResult> {
         extends SingleInputOutputOverrideFactory<
             PCollection<T>, PCollectionView<T>, View.AsSingleton<T>> {
       @Override
-      public PTransform<PCollection<T>, PCollectionView<T>> getReplacementTransform(
-          AsSingleton<T> transform) {
-        return new StreamingViewAsSingleton<>(transform);
+      public PTransformReplacement<PCollection<T>, PCollectionView<T>> getReplacementTransform(
+          AppliedPTransform<PCollection<T>, PCollectionView<T>, AsSingleton<T>> transform) {
+        return PTransformReplacement.of(
+            PTransformReplacements.getSingletonMainInput(transform),
+            new StreamingViewAsSingleton<>(transform.getTransform()));
       }
     }
   }
@@ -352,9 +362,13 @@ public class ApexRunner extends PipelineRunner<ApexRunnerResult> {
         extends SingleInputOutputOverrideFactory<
             PCollection<T>, PCollectionView<Iterable<T>>, View.AsIterable<T>> {
       @Override
-      public PTransform<PCollection<T>, PCollectionView<Iterable<T>>> getReplacementTransform(
-          AsIterable<T> transform) {
-        return new StreamingViewAsIterable<>();
+      public PTransformReplacement<PCollection<T>, PCollectionView<Iterable<T>>>
+          getReplacementTransform(
+              AppliedPTransform<PCollection<T>, PCollectionView<Iterable<T>>, AsIterable<T>>
+                  transform) {
+        return PTransformReplacement.of(
+            PTransformReplacements.getSingletonMainInput(transform),
+            new StreamingViewAsIterable<T>());
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactory.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactory.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactory.java
index c12c548..13e7593 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactory.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactory.java
@@ -18,10 +18,12 @@
 
 package org.apache.beam.runners.core.construction;
 
+import com.google.common.annotations.VisibleForTesting;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.Flatten;
 import org.apache.beam.sdk.transforms.Flatten.PCollections;
@@ -47,32 +49,11 @@ public class DeduplicatedFlattenFactory<T>
   private DeduplicatedFlattenFactory() {}
 
   @Override
-  public PTransform<PCollectionList<T>, PCollection<T>> getReplacementTransform(
-      PCollections<T> transform) {
-    return new PTransform<PCollectionList<T>, PCollection<T>>() {
-      @Override
-      public PCollection<T> expand(PCollectionList<T> input) {
-        Map<PCollection<T>, Integer> instances = new HashMap<>();
-        for (PCollection<T> pCollection : input.getAll()) {
-          int existing = instances.get(pCollection) == null ? 0 : instances.get(pCollection);
-          instances.put(pCollection, existing + 1);
-        }
-        PCollectionList<T> output = PCollectionList.empty(input.getPipeline());
-        for (Map.Entry<PCollection<T>, Integer> instanceEntry : instances.entrySet()) {
-          if (instanceEntry.getValue().equals(1)) {
-            output = output.and(instanceEntry.getKey());
-          } else {
-            String duplicationName = String.format("Multiply %s", instanceEntry.getKey().getName());
-            PCollection<T> duplicated =
-                instanceEntry
-                    .getKey()
-                    .apply(duplicationName, ParDo.of(new DuplicateFn<T>(instanceEntry.getValue())));
-            output = output.and(duplicated);
-          }
-        }
-        return output.apply(Flatten.<T>pCollections());
-      }
-    };
+  public PTransformReplacement<PCollectionList<T>, PCollection<T>> getReplacementTransform(
+      AppliedPTransform<PCollectionList<T>, PCollection<T>, PCollections<T>> transform) {
+    return PTransformReplacement.of(
+        getInput(transform.getInputs(), transform.getPipeline()),
+        new FlattenWithoutDuplicateInputs<T>());
   }
 
   /**
@@ -80,8 +61,7 @@ public class DeduplicatedFlattenFactory<T>
    *
    * <p>The input {@link PCollectionList} that is constructed will have the same values in the same
    */
-  @Override
-  public PCollectionList<T> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
+  private PCollectionList<T> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
     PCollectionList<T> pCollections = PCollectionList.empty(p);
     for (PValue input : inputs.values()) {
       PCollection<T> pcollection = (PCollection<T>) input;
@@ -96,6 +76,33 @@ public class DeduplicatedFlattenFactory<T>
     return ReplacementOutputs.singleton(outputs, newOutput);
   }
 
+  @VisibleForTesting
+  static class FlattenWithoutDuplicateInputs<T>
+      extends PTransform<PCollectionList<T>, PCollection<T>> {
+    @Override
+    public PCollection<T> expand(PCollectionList<T> input) {
+      Map<PCollection<T>, Integer> instances = new HashMap<>();
+      for (PCollection<T> pCollection : input.getAll()) {
+        int existing = instances.get(pCollection) == null ? 0 : instances.get(pCollection);
+        instances.put(pCollection, existing + 1);
+      }
+      PCollectionList<T> output = PCollectionList.empty(input.getPipeline());
+      for (Map.Entry<PCollection<T>, Integer> instanceEntry : instances.entrySet()) {
+        if (instanceEntry.getValue().equals(1)) {
+          output = output.and(instanceEntry.getKey());
+        } else {
+          String duplicationName = String.format("Multiply %s", instanceEntry.getKey().getName());
+          PCollection<T> duplicated =
+              instanceEntry
+                  .getKey()
+                  .apply(duplicationName, ParDo.of(new DuplicateFn<T>(instanceEntry.getValue())));
+          output = output.and(duplicated);
+        }
+      }
+      return output.apply(Flatten.<T>pCollections());
+    }
+  }
+
   private static class DuplicateFn<T> extends DoFn<T, T> {
     private final int numTimes;
 

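The duplicated-input handling above exists because a PCollectionList may contain the same PCollection more than once, while the flattened output must still reflect every occurrence; each duplicate is therefore multiplied with a ParDo so the final Flatten sees only distinct inputs. An illustrative use (FlattenWithoutDuplicateInputs is package-private and @VisibleForTesting, so this is a sketch rather than public API):

    PCollection<String> words = pipeline.apply(Create.of("a", "b"));
    // The same PCollection appears twice in the list.
    PCollectionList<String> twice = PCollectionList.of(words).and(words);
    // Each element appears twice in the flattened output: a, a, b, b.
    PCollection<String> flattened =
        twice.apply(new FlattenWithoutDuplicateInputs<String>());
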
http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactory.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactory.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactory.java
index 936bc08..a6982d4 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactory.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactory.java
@@ -21,11 +21,12 @@ package org.apache.beam.runners.core.construction;
 import static com.google.common.base.Preconditions.checkArgument;
 
 import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.Flatten.PCollections;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionList;
@@ -49,20 +50,15 @@ public class EmptyFlattenAsCreateFactory<T>
   private EmptyFlattenAsCreateFactory() {}
 
   @Override
-  public PTransform<PCollectionList<T>, PCollection<T>> getReplacementTransform(
-      Flatten.PCollections<T> transform) {
-    return new CreateEmptyFromList<>();
-  }
-
-  @Override
-  public PCollectionList<T> getInput(
-      Map<TupleTag<?>, PValue> inputs, Pipeline p) {
+  public PTransformReplacement<PCollectionList<T>, PCollection<T>> getReplacementTransform(
+      AppliedPTransform<PCollectionList<T>, PCollection<T>, PCollections<T>> transform) {
     checkArgument(
-        inputs.isEmpty(),
+        transform.getInputs().isEmpty(),
         "Unexpected nonempty input %s for %s",
-        inputs,
+        transform.getInputs(),
         getClass().getSimpleName());
-    return PCollectionList.empty(p);
+    return PTransformReplacement.of(
+        PCollectionList.<T>empty(transform.getPipeline()), new CreateEmptyFromList<T>());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformMatchers.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformMatchers.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformMatchers.java
index 94ec38c..09946bc 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformMatchers.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformMatchers.java
@@ -52,8 +52,6 @@ public class PTransformMatchers {
   /**
    * Returns a {@link PTransformMatcher} that matches a {@link PTransform} if the class of the
   * {@link PTransform} is equal to the {@link Class} provided to this matcher.
-   * @param clazz
-   * @return
    */
   public static PTransformMatcher classEqualTo(Class<? extends PTransform> clazz) {
     return new EqualClassPTransformMatcher(clazz);

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformReplacements.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformReplacements.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformReplacements.java
new file mode 100644
index 0000000..72a3425
--- /dev/null
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformReplacements.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.runners.core.construction;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import java.util.Map;
+import java.util.Set;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+
+/** Utility methods for obtaining the inputs of an {@link AppliedPTransform}
+ *  when constructing replacement transforms. */
+public class PTransformReplacements {
+  /**
+   * Gets the singleton input of an {@link AppliedPTransform}, ignoring any additional inputs
+   * returned by {@link PTransform#getAdditionalInputs()}.
+   */
+  public static <T> PCollection<T> getSingletonMainInput(
+      AppliedPTransform<? extends PCollection<? extends T>, ?, ?> application) {
+    return getSingletonMainInput(
+        application.getInputs(), application.getTransform().getAdditionalInputs().keySet());
+  }
+
+  private static <T> PCollection<T> getSingletonMainInput(
+      Map<TupleTag<?>, PValue> inputs, Set<TupleTag<?>> ignoredTags) {
+    PCollection<T> mainInput = null;
+    for (Map.Entry<TupleTag<?>, PValue> input : inputs.entrySet()) {
+      if (!ignoredTags.contains(input.getKey())) {
+        checkArgument(
+            mainInput == null,
+            "Got multiple inputs that are not additional inputs for a "
+                + "singleton main input: %s and %s",
+            mainInput,
+            input.getValue());
+        checkArgument(
+            input.getValue() instanceof PCollection,
+            "Unexpected input type %s",
+            input.getValue().getClass());
+        mainInput = (PCollection<T>) input.getValue();
+      }
+    }
+    checkArgument(
+        mainInput != null,
+        "No main input found in inputs: Inputs %s, Side Input tags %s",
+        inputs,
+        ignoredTags);
+    return mainInput;
+  }
+}
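
An illustrative call, assuming appliedParDo is an AppliedPTransform whose inputs map holds one main input plus side-input tags (the names here are hypothetical):

    // Given inputs {mainTag -> mainPc, sideTag -> sideView}, the helper returns
    // mainPc, ignoring every tag listed in getAdditionalInputs(); it fails fast
    // if zero or several non-side inputs are present.
    PCollection<String> mainPc =
        PTransformReplacements.getSingletonMainInput(appliedParDo);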

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PrimitiveCreate.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PrimitiveCreate.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PrimitiveCreate.java
index 9335f3a..5a2140b 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PrimitiveCreate.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PrimitiveCreate.java
@@ -19,8 +19,8 @@
 package org.apache.beam.runners.core.construction;
 
 import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.Create.Values;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -57,13 +57,10 @@ public class PrimitiveCreate<T> extends PTransform<PBegin, PCollection<T>> {
   public static class Factory<T>
       implements PTransformOverrideFactory<PBegin, PCollection<T>, Values<T>> {
     @Override
-    public PTransform<PBegin, PCollection<T>> getReplacementTransform(Values<T> transform) {
-      return new PrimitiveCreate<>(transform);
-    }
-
-    @Override
-    public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return p.begin();
+    public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
+        AppliedPTransform<PBegin, PCollection<T>, Values<T>> transform) {
+      return PTransformReplacement.of(
+          transform.getPipeline().begin(), new PrimitiveCreate<T>(transform.getTransform()));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactory.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactory.java
index 6d0d571..7a59c1c 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactory.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactory.java
@@ -18,9 +18,7 @@
 
 package org.apache.beam.runners.core.construction;
 
-import com.google.common.collect.Iterables;
 import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.PValue;
@@ -28,7 +26,7 @@ import org.apache.beam.sdk.values.TupleTag;
 
 /**
  * A {@link PTransformOverrideFactory} which consumes from a {@link PValue} and produces a
- * {@link PValue}. {@link #getInput(Map, Pipeline)} and {@link #mapOutputs(Map, PValue)} are
+ * {@link PValue}. {@link #mapOutputs(Map, PValue)} is
  * implemented.
  */
 public abstract class SingleInputOutputOverrideFactory<
@@ -37,11 +35,6 @@ public abstract class SingleInputOutputOverrideFactory<
         TransformT extends PTransform<InputT, OutputT>>
     implements PTransformOverrideFactory<InputT, OutputT, TransformT> {
   @Override
-  public final InputT getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-    return (InputT) Iterables.getOnlyElement(inputs.values());
-  }
-
-  @Override
   public final Map<PValue, ReplacementOutput> mapOutputs(
       Map<TupleTag<?>, PValue> outputs, OutputT newOutput) {
     return ReplacementOutputs.singleton(outputs, newOutput);

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactory.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactory.java
index 7b9d704..efafa33 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactory.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactory.java
@@ -19,8 +19,8 @@
 package org.apache.beam.runners.core.construction;
 
 import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.PInput;
 import org.apache.beam.sdk.values.POutput;
@@ -29,8 +29,8 @@ import org.apache.beam.sdk.values.TupleTag;
 
 /**
  * A {@link PTransformOverrideFactory} that throws an exception when a call to
- * {@link #getReplacementTransform(PTransform)} is made. This is for {@link PTransform PTransforms}
- * which are not supported by a runner.
+ * {@link #getReplacementTransform(AppliedPTransform)} is made. This is for
+ * {@link PTransform PTransforms} which are not supported by a runner.
  */
 public final class UnsupportedOverrideFactory<
         InputT extends PInput,
@@ -54,12 +54,8 @@ public final class UnsupportedOverrideFactory<
   }
 
   @Override
-  public PTransform<InputT, OutputT> getReplacementTransform(TransformT transform) {
-    throw new UnsupportedOperationException(message);
-  }
-
-  @Override
-  public InputT getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
+  public PTransformReplacement<InputT, OutputT> getReplacementTransform(
+      AppliedPTransform<InputT, OutputT, TransformT> transform) {
     throw new UnsupportedOperationException(message);
   }
 

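A usage note, illustrative rather than part of this commit: runners pair this factory with a
matcher for a transform they cannot run, so pipeline construction fails with the configured
message instead of producing a broken translation. The sketch below assumes the withMessage
factory method and the classEqualTo matcher; both names appear elsewhere in the Beam codebase
but are assumptions here, and TestStream stands in for any unsupported primitive.

import org.apache.beam.runners.core.construction.PTransformMatchers;
import org.apache.beam.runners.core.construction.UnsupportedOverrideFactory;
import org.apache.beam.sdk.runners.PTransformOverride;
import org.apache.beam.sdk.testing.TestStream;

class UnsupportedOverrideExample {
  // A runner would append this to its list of PTransformOverrides; matching a
  // TestStream then throws UnsupportedOperationException with this message
  // when getReplacementTransform is invoked.
  static PTransformOverride unsupportedTestStream() {
    return PTransformOverride.of(
        PTransformMatchers.classEqualTo(TestStream.class),
        UnsupportedOverrideFactory.withMessage(
            "TestStream is not supported by this runner"));
  }
}
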
http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactoryTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactoryTest.java
index 14aa1e6..4e08c21 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactoryTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/DeduplicatedFlattenFactoryTest.java
@@ -22,6 +22,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertThat;
 
+import org.apache.beam.runners.core.construction.DeduplicatedFlattenFactory.FlattenWithoutDuplicateInputs;
 import org.apache.beam.sdk.Pipeline.PipelineVisitor.Defaults;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput;
 import org.apache.beam.sdk.runners.TransformHierarchy;
@@ -56,7 +57,7 @@ public class DeduplicatedFlattenFactoryTest {
   @Test
   public void duplicatesInsertsMultipliers() {
     PTransform<PCollectionList<String>, PCollection<String>> replacement =
-        factory.getReplacementTransform(Flatten.<String>pCollections());
+        new DeduplicatedFlattenFactory.FlattenWithoutDuplicateInputs<>();
     final PCollectionList<String> inputList =
         PCollectionList.of(first).and(second).and(first).and(first);
     inputList.apply(replacement);
@@ -74,10 +75,10 @@ public class DeduplicatedFlattenFactoryTest {
   @Test
   @Category(NeedsRunner.class)
   public void testOverride() {
-    PTransform<PCollectionList<String>, PCollection<String>> replacement =
-        factory.getReplacementTransform(Flatten.<String>pCollections());
     final PCollectionList<String> inputList =
         PCollectionList.of(first).and(second).and(first).and(first);
+    PTransform<PCollectionList<String>, PCollection<String>> replacement =
+        new FlattenWithoutDuplicateInputs<>();
     PCollection<String> flattened = inputList.apply(replacement);
 
     PAssert.that(flattened).containsInAnyOrder("one", "two", "one", "one");
@@ -85,21 +86,12 @@ public class DeduplicatedFlattenFactoryTest {
   }
 
   @Test
-  public void inputReconstruction() {
-    final PCollectionList<String> inputList =
-        PCollectionList.of(first).and(second).and(first).and(first);
-
-    assertThat(factory.getInput(inputList.expand(), pipeline), equalTo(inputList));
-  }
-
-  @Test
   public void outputMapping() {
     final PCollectionList<String> inputList =
         PCollectionList.of(first).and(second).and(first).and(first);
     PCollection<String> original =
         inputList.apply(Flatten.<String>pCollections());
-    PCollection<String> replacement =
-        inputList.apply(factory.getReplacementTransform(Flatten.<String>pCollections()));
+    PCollection<String> replacement = inputList.apply(new FlattenWithoutDuplicateInputs<String>());
 
     assertThat(
         factory.mapOutputs(original.expand(), replacement),

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactoryTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactoryTest.java
index 90bbee7..ae2d0a9 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactoryTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/EmptyFlattenAsCreateFactoryTest.java
@@ -18,17 +18,20 @@
 
 package org.apache.beam.runners.core.construction;
 
-import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.emptyIterable;
 import static org.junit.Assert.assertThat;
 
 import java.util.Collections;
 import java.util.Map;
 import org.apache.beam.sdk.io.CountingInput;
+import org.apache.beam.sdk.runners.PTransformOverrideFactory.PTransformReplacement;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput;
 import org.apache.beam.sdk.testing.NeedsRunner;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Flatten;
+import org.apache.beam.sdk.transforms.Flatten.PCollections;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionList;
 import org.apache.beam.sdk.values.PValue;
@@ -54,8 +57,15 @@ public class EmptyFlattenAsCreateFactoryTest {
 
   @Test
   public void getInputEmptySucceeds() {
-    assertThat(
-        factory.getInput(Collections.<TupleTag<?>, PValue>emptyMap(), pipeline).size(), equalTo(0));
+    PTransformReplacement<PCollectionList<Long>, PCollection<Long>> replacement =
+        factory.getReplacementTransform(
+            AppliedPTransform.<PCollectionList<Long>, PCollection<Long>, PCollections<Long>>of(
+                "nonEmptyInput",
+                Collections.<TupleTag<?>, PValue>emptyMap(),
+                Collections.<TupleTag<?>, PValue>emptyMap(),
+                Flatten.<Long>pCollections(),
+                pipeline));
+    assertThat(replacement.getInput().getAll(), emptyIterable());
   }
 
   @Test
@@ -66,7 +76,13 @@ public class EmptyFlattenAsCreateFactoryTest {
     thrown.expect(IllegalArgumentException.class);
     thrown.expectMessage(nonEmpty.expand().toString());
     thrown.expectMessage(EmptyFlattenAsCreateFactory.class.getSimpleName());
-    factory.getInput(nonEmpty.expand(), pipeline);
+    factory.getReplacementTransform(
+        AppliedPTransform.<PCollectionList<Long>, PCollection<Long>, Flatten.PCollections<Long>>of(
+            "nonEmptyInput",
+            nonEmpty.expand(),
+            Collections.<TupleTag<?>, PValue>emptyMap(),
+            Flatten.<Long>pCollections(),
+            pipeline));
   }
 
   @Test
@@ -89,7 +105,17 @@ public class EmptyFlattenAsCreateFactoryTest {
   public void testOverride() {
     PCollectionList<Long> empty = PCollectionList.empty(pipeline);
     PCollection<Long> emptyFlattened =
-        empty.apply(factory.getReplacementTransform(Flatten.<Long>pCollections()));
+        empty.apply(
+            factory
+                .getReplacementTransform(
+                    AppliedPTransform
+                        .<PCollectionList<Long>, PCollection<Long>, Flatten.PCollections<Long>>of(
+                            "nonEmptyInput",
+                            Collections.<TupleTag<?>, PValue>emptyMap(),
+                            Collections.<TupleTag<?>, PValue>emptyMap(),
+                            Flatten.<Long>pCollections(),
+                            pipeline))
+                .getTransform());
     PAssert.that(emptyFlattened).empty();
     pipeline.run();
   }

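An aside on the migrated tests above: each one rebuilds the five-argument AppliedPTransform.of
call by hand. A hypothetical helper, not in this commit and assuming AppliedPTransform.of keeps
the generic shape shown in the diff, could centralize the empty-map boilerplate:

import java.util.Collections;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.transforms.AppliedPTransform;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.POutput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;

class TestApplications {
  // Builds an AppliedPTransform with no inputs or outputs, mirroring the
  // pattern in getInputEmptySucceeds and testOverride above.
  static <InputT extends PInput, OutputT extends POutput,
          TransformT extends PTransform<InputT, OutputT>>
      AppliedPTransform<InputT, OutputT, TransformT> emptyIo(
          String name, TransformT transform, Pipeline pipeline) {
    return AppliedPTransform.<InputT, OutputT, TransformT>of(
        name,
        Collections.<TupleTag<?>, PValue>emptyMap(),
        Collections.<TupleTag<?>, PValue>emptyMap(),
        transform,
        pipeline);
  }
}
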
http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformReplacementsTest.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformReplacementsTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformReplacementsTest.java
new file mode 100644
index 0000000..b065617
--- /dev/null
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformReplacementsTest.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.runners.core.construction;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.junit.Assert.assertThat;
+
+import com.google.common.collect.ImmutableMap;
+import java.util.Collections;
+import org.apache.beam.sdk.io.CountingInput;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.View;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/**
+ * Tests for {@link PTransformReplacements}.
+ */
+@RunWith(JUnit4.class)
+public class PTransformReplacementsTest {
+  @Rule public TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);
+  @Rule public ExpectedException thrown = ExpectedException.none();
+  private PCollection<Long> mainInput = pipeline.apply(CountingInput.unbounded());
+  private PCollectionView<String> sideInput =
+      pipeline.apply(Create.of("foo")).apply(View.<String>asSingleton());
+
+  private PCollection<Long> output = mainInput.apply(ParDo.of(new TestDoFn()));
+
+  @Test
+  public void getMainInputSingleOutputSingleInput() {
+    AppliedPTransform<PCollection<Long>, ?, ?> application =
+        AppliedPTransform.of(
+            "application",
+            Collections.<TupleTag<?>, PValue>singletonMap(new TupleTag<Long>(), mainInput),
+            Collections.<TupleTag<?>, PValue>singletonMap(new TupleTag<Long>(), output),
+            ParDo.of(new TestDoFn()),
+            pipeline);
+    PCollection<Long> input = PTransformReplacements.getSingletonMainInput(application);
+    assertThat(input, equalTo(mainInput));
+  }
+
+  @Test
+  public void getMainInputSingleOutputSideInputs() {
+    AppliedPTransform<PCollection<Long>, ?, ?> application =
+        AppliedPTransform.of(
+            "application",
+            ImmutableMap.<TupleTag<?>, PValue>builder()
+                .put(new TupleTag<Long>(), mainInput)
+                .put(sideInput.getTagInternal(), sideInput.getPCollection())
+                .build(),
+            Collections.<TupleTag<?>, PValue>singletonMap(new TupleTag<Long>(), output),
+            ParDo.of(new TestDoFn()).withSideInputs(sideInput),
+            pipeline);
+    PCollection<Long> input = PTransformReplacements.getSingletonMainInput(application);
+    assertThat(input, equalTo(mainInput));
+  }
+
+  @Test
+  public void getMainInputExtraMainInputsThrows() {
+    PCollection<Long> notInParDo = pipeline.apply("otherPCollection", Create.of(1L, 2L, 3L));
+    ImmutableMap<TupleTag<?>, PValue> inputs =
+        ImmutableMap.<TupleTag<?>, PValue>builder()
+            .putAll(mainInput.expand())
+            // Not an additional input, so it is treated as a second main input
+            .put(new TupleTag<Long>(), notInParDo)
+            .put(sideInput.getTagInternal(), sideInput.getPCollection())
+            .build();
+    AppliedPTransform<PCollection<Long>, ?, ?> application =
+        AppliedPTransform.of(
+            "application",
+            inputs,
+            Collections.<TupleTag<?>, PValue>singletonMap(new TupleTag<Long>(), output),
+            ParDo.of(new TestDoFn()).withSideInputs(sideInput),
+            pipeline);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("multiple inputs");
+    thrown.expectMessage("not additional inputs");
+    thrown.expectMessage(mainInput.toString());
+    thrown.expectMessage(notInParDo.toString());
+    PTransformReplacements.getSingletonMainInput(application);
+  }
+
+  @Test
+  public void getMainInputNoMainInputsThrows() {
+    ImmutableMap<TupleTag<?>, PValue> inputs =
+        ImmutableMap.<TupleTag<?>, PValue>builder()
+            .put(sideInput.getTagInternal(), sideInput.getPCollection())
+            .build();
+    AppliedPTransform<PCollection<Long>, ?, ?> application =
+        AppliedPTransform.of(
+            "application",
+            inputs,
+            Collections.<TupleTag<?>, PValue>singletonMap(new TupleTag<Long>(), output),
+            ParDo.of(new TestDoFn()).withSideInputs(sideInput),
+            pipeline);
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("No main input");
+    PTransformReplacements.getSingletonMainInput(application);
+  }
+
+  private static class TestDoFn extends DoFn<Long, Long> {
+    @ProcessElement public void process(ProcessContext context) {}
+  }
+}

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactoryTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactoryTest.java
index 07352f5..acca5cd 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactoryTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/SingleInputOutputOverrideFactoryTest.java
@@ -24,9 +24,9 @@ import java.io.Serializable;
 import java.util.Map;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.MapElements;
-import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionList;
@@ -55,9 +55,15 @@ public class SingleInputOutputOverrideFactoryTest implements Serializable {
               PCollection<? extends Integer>, PCollection<Integer>,
               MapElements<Integer, Integer>>() {
             @Override
-            public PTransform<PCollection<? extends Integer>, PCollection<Integer>>
-                getReplacementTransform(MapElements<Integer, Integer> transform) {
-              return transform;
+            public PTransformReplacement<PCollection<? extends Integer>, PCollection<Integer>>
+                getReplacementTransform(
+                    AppliedPTransform<
+                            PCollection<? extends Integer>, PCollection<Integer>,
+                            MapElements<Integer, Integer>>
+                        transform) {
+              return PTransformReplacement.of(
+                  PTransformReplacements.getSingletonMainInput(transform),
+                  transform.getTransform());
             }
           };
 
@@ -69,23 +75,6 @@ public class SingleInputOutputOverrideFactoryTest implements Serializable {
     };
 
   @Test
-  public void testGetInput() {
-    PCollection<Integer> input = pipeline.apply(Create.of(1, 2, 3));
-    assertThat(
-        factory.getInput(input.expand(), pipeline),
-        Matchers.<PCollection<? extends Integer>>equalTo(input));
-  }
-
-  @Test
-  public void testGetInputMultipleInputsFails() {
-    PCollection<Integer> input = pipeline.apply(Create.of(1, 2, 3));
-    PCollection<Integer> otherInput = pipeline.apply("OtherCreate", Create.of(1, 2, 3));
-
-    thrown.expect(IllegalArgumentException.class);
-    factory.getInput(PCollectionList.of(input).and(otherInput).expand(), pipeline);
-  }
-
-  @Test
   public void testMapOutputs() {
     PCollection<Integer> input = pipeline.apply(Create.of(1, 2, 3));
     PCollection<Integer> output = input.apply("Map", MapElements.via(fn));

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactoryTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactoryTest.java
index 81ce00d..6d3b263 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactoryTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/UnsupportedOverrideFactoryTest.java
@@ -19,9 +19,7 @@
 package org.apache.beam.runners.core.construction;
 
 import java.util.Collections;
-import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.values.PDone;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
@@ -47,14 +45,7 @@ public class UnsupportedOverrideFactoryTest {
   public void getReplacementTransformThrows() {
     thrown.expect(UnsupportedOperationException.class);
     thrown.expectMessage(message);
-    factory.getReplacementTransform(Create.empty(VoidCoder.of()));
-  }
-
-  @Test
-  public void getInputThrows() {
-    thrown.expect(UnsupportedOperationException.class);
-    thrown.expectMessage(message);
-    factory.getInput(Collections.<TupleTag<?>, PValue>emptyMap(), pipeline);
+    factory.getReplacementTransform(null);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGBKIntoKeyedWorkItemsOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGBKIntoKeyedWorkItemsOverrideFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGBKIntoKeyedWorkItemsOverrideFactory.java
index bb90a6c..1120243 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGBKIntoKeyedWorkItemsOverrideFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGBKIntoKeyedWorkItemsOverrideFactory.java
@@ -19,8 +19,9 @@ package org.apache.beam.runners.direct;
 
 import org.apache.beam.runners.core.KeyedWorkItem;
 import org.apache.beam.runners.core.SplittableParDo.GBKIntoKeyedWorkItems;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
-import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 
@@ -33,8 +34,15 @@ class DirectGBKIntoKeyedWorkItemsOverrideFactory<KeyT, InputT>
         PCollection<KV<KeyT, InputT>>, PCollection<KeyedWorkItem<KeyT, InputT>>,
         GBKIntoKeyedWorkItems<KeyT, InputT>> {
   @Override
-  public PTransform<PCollection<KV<KeyT, InputT>>, PCollection<KeyedWorkItem<KeyT, InputT>>>
-      getReplacementTransform(GBKIntoKeyedWorkItems<KeyT, InputT> transform) {
-    return new DirectGroupByKey.DirectGroupByKeyOnly<>();
+  public PTransformReplacement<
+          PCollection<KV<KeyT, InputT>>, PCollection<KeyedWorkItem<KeyT, InputT>>>
+      getReplacementTransform(
+          AppliedPTransform<
+                  PCollection<KV<KeyT, InputT>>, PCollection<KeyedWorkItem<KeyT, InputT>>,
+                  GBKIntoKeyedWorkItems<KeyT, InputT>>
+              transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        new DirectGroupByKey.DirectGroupByKeyOnly<KeyT, InputT>());
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactory.java
index f3b718f..4eb0363 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactory.java
@@ -17,10 +17,11 @@
  */
 package org.apache.beam.runners.direct;
 
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 
@@ -29,8 +30,13 @@ final class DirectGroupByKeyOverrideFactory<K, V>
     extends SingleInputOutputOverrideFactory<
         PCollection<KV<K, V>>, PCollection<KV<K, Iterable<V>>>, GroupByKey<K, V>> {
   @Override
-  public PTransform<PCollection<KV<K, V>>, PCollection<KV<K, Iterable<V>>>> getReplacementTransform(
-      GroupByKey<K, V> transform) {
-    return new DirectGroupByKey<>(transform);
+  public PTransformReplacement<PCollection<KV<K, V>>, PCollection<KV<K, Iterable<V>>>>
+      getReplacementTransform(
+          AppliedPTransform<
+                  PCollection<KV<K, V>>, PCollection<KV<K, Iterable<V>>>, GroupByKey<K, V>>
+              transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        new DirectGroupByKey<>(transform.getTransform()));
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
index 366777b..b08aa8e 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactory.java
@@ -19,18 +19,18 @@ package org.apache.beam.runners.direct;
 
 import static com.google.common.base.Preconditions.checkState;
 
-import com.google.common.collect.Iterables;
 import java.util.Map;
 import org.apache.beam.runners.core.KeyedWorkItem;
 import org.apache.beam.runners.core.KeyedWorkItemCoder;
 import org.apache.beam.runners.core.KeyedWorkItems;
 import org.apache.beam.runners.core.SplittableParDo;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.ReplacementOutputs;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.CannotProvideCoderException;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -62,8 +62,18 @@ class ParDoMultiOverrideFactory<InputT, OutputT>
     implements PTransformOverrideFactory<
         PCollection<? extends InputT>, PCollectionTuple, MultiOutput<InputT, OutputT>> {
   @Override
+  public PTransformReplacement<PCollection<? extends InputT>, PCollectionTuple>
+      getReplacementTransform(
+          AppliedPTransform<
+                  PCollection<? extends InputT>, PCollectionTuple, MultiOutput<InputT, OutputT>>
+              transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        getReplacementTransform(transform.getTransform()));
+  }
+
   @SuppressWarnings("unchecked")
-  public PTransform<PCollection<? extends InputT>, PCollectionTuple> getReplacementTransform(
+  private PTransform<PCollection<? extends InputT>, PCollectionTuple> getReplacementTransform(
       MultiOutput<InputT, OutputT> transform) {
 
     DoFn<InputT, OutputT> fn = transform.getFn();
@@ -84,12 +94,6 @@ class ParDoMultiOverrideFactory<InputT, OutputT>
   }
 
   @Override
-  public PCollection<? extends InputT> getInput(
-      Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-    return (PCollection<? extends InputT>) Iterables.getOnlyElement(inputs.values());
-  }
-
-  @Override
   public Map<PValue, ReplacementOutput> mapOutputs(
       Map<TupleTag<?>, PValue> outputs, PCollectionTuple newOutput) {
     return ReplacementOutputs.tagged(outputs, newOutput);

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java
index 6e0a4fc..cba754e 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java
@@ -31,7 +31,6 @@ import javax.annotation.Nullable;
 import org.apache.beam.runners.core.construction.ReplacementOutputs;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
 import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
 import org.apache.beam.sdk.testing.TestStream;
 import org.apache.beam.sdk.testing.TestStream.ElementEvent;
@@ -170,14 +169,11 @@ class TestStreamEvaluatorFactory implements TransformEvaluatorFactory {
     }
 
     @Override
-    public PTransform<PBegin, PCollection<T>> getReplacementTransform(
-        TestStream<T> transform) {
-      return new DirectTestStream<>(runner, transform);
-    }
-
-    @Override
-    public PBegin getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return p.begin();
+    public PTransformReplacement<PBegin, PCollection<T>> getReplacementTransform(
+        AppliedPTransform<PBegin, PCollection<T>, TestStream<T>> transform) {
+      return PTransformReplacement.of(
+          transform.getPipeline().begin(),
+          new DirectTestStream<T>(runner, transform.getTransform()));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewOverrideFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewOverrideFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewOverrideFactory.java
index 52dc329..d4fd18f 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewOverrideFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewOverrideFactory.java
@@ -18,14 +18,14 @@
 
 package org.apache.beam.runners.direct;
 
-import com.google.common.collect.Iterables;
 import java.util.Collections;
 import java.util.Map;
 import org.apache.beam.runners.core.construction.ForwardingPTransform;
-import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.Values;
@@ -43,15 +43,15 @@ import org.apache.beam.sdk.values.TupleTag;
 class ViewOverrideFactory<ElemT, ViewT>
     implements PTransformOverrideFactory<
         PCollection<ElemT>, PCollectionView<ViewT>, CreatePCollectionView<ElemT, ViewT>> {
-  @Override
-  public PTransform<PCollection<ElemT>, PCollectionView<ViewT>> getReplacementTransform(
-      CreatePCollectionView<ElemT, ViewT> transform) {
-    return new GroupAndWriteView<>(transform);
-  }
 
   @Override
-  public PCollection<ElemT> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-    return (PCollection<ElemT>) Iterables.getOnlyElement(inputs.values());
+  public PTransformReplacement<PCollection<ElemT>, PCollectionView<ViewT>> getReplacementTransform(
+      AppliedPTransform<
+              PCollection<ElemT>, PCollectionView<ViewT>, CreatePCollectionView<ElemT, ViewT>>
+          transform) {
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        new GroupAndWriteView<>(transform.getTransform()));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
index b3f92ab..a23ab94 100644
--- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
+++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java
@@ -21,14 +21,14 @@ package org.apache.beam.runners.direct;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Supplier;
 import com.google.common.base.Suppliers;
-import com.google.common.collect.Iterables;
 import java.io.Serializable;
 import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.ThreadLocalRandom;
-import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.sdk.io.Write;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -53,14 +53,12 @@ class WriteWithShardingFactory<InputT>
   @VisibleForTesting static final int MIN_SHARDS_FOR_LOG = 3;
 
   @Override
-  public PTransform<PCollection<InputT>, PDone> getReplacementTransform(
-      Write<InputT> transform) {
-    return transform.withSharding(new LogElementShardsWithDrift<InputT>());
-  }
+  public PTransformReplacement<PCollection<InputT>, PDone> getReplacementTransform(
+      AppliedPTransform<PCollection<InputT>, PDone, Write<InputT>> transform) {
 
-  @Override
-  public PCollection<InputT> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-    return (PCollection<InputT>) Iterables.getOnlyElement(inputs.values());
+    return PTransformReplacement.of(
+        PTransformReplacements.getSingletonMainInput(transform),
+        transform.getTransform().withSharding(new LogElementShardsWithDrift<InputT>()));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactoryTest.java
index c9fdda0..28fef4c 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectGroupByKeyOverrideFactoryTest.java
@@ -23,8 +23,11 @@ import static org.junit.Assert.assertThat;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VarIntCoder;
+import org.apache.beam.sdk.runners.PTransformOverrideFactory.PTransformReplacement;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.hamcrest.Matchers;
@@ -45,7 +48,12 @@ public class DirectGroupByKeyOverrideFactoryTest {
         p.apply(
             Create.of(KV.of("foo", 1))
                 .withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())));
-    PCollection<?> reconstructed = factory.getInput(input.expand(), p);
-    assertThat(reconstructed, Matchers.<PCollection<?>>equalTo(input));
+    PCollection<KV<String, Iterable<Integer>>> grouped =
+        input.apply(GroupByKey.<String, Integer>create());
+    AppliedPTransform<?, ?, ?> producer = DirectGraphs.getProducer(grouped);
+    PTransformReplacement<
+            PCollection<KV<String, Integer>>, PCollection<KV<String, Iterable<Integer>>>>
+        replacement = factory.getReplacementTransform((AppliedPTransform) producer);
+    assertThat(replacement.getInput(), Matchers.<PCollection<?>>equalTo(input));
   }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactoryTest.java
deleted file mode 100644
index 4bbf924..0000000
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiOverrideFactoryTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.runners.direct;
-
-import static org.junit.Assert.assertThat;
-
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.values.PCollection;
-import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/**
- * Tests for {@link ParDoMultiOverrideFactory}.
- */
-@RunWith(JUnit4.class)
-public class ParDoMultiOverrideFactoryTest {
-  private ParDoMultiOverrideFactory factory = new ParDoMultiOverrideFactory();
-
-  @Test
-  public void getInputSucceeds() {
-    TestPipeline p = TestPipeline.create();
-    PCollection<Integer> input = p.apply(Create.of(1, 2, 3));
-    PCollection<?> reconstructed = factory.getInput(input.expand(), p);
-    assertThat(reconstructed, Matchers.<PCollection<?>>equalTo(input));
-  }
-}

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactoryTest.java
index 0d909c2..b9c6e64 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactoryTest.java
@@ -27,22 +27,17 @@ import com.google.common.collect.Iterables;
 import java.util.Collection;
 import java.util.Collections;
 import org.apache.beam.runners.direct.DirectRunner.CommittedBundle;
-import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.DirectTestStreamFactory;
 import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.DirectTestStreamFactory.DirectTestStream;
 import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.TestClock;
 import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.TestStreamIndex;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.testing.TestStream;
 import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TimestampedValue;
-import org.apache.beam.sdk.values.TupleTag;
 import org.hamcrest.Matchers;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
@@ -180,11 +175,4 @@ public class TestStreamEvaluatorFactoryTest {
     assertThat(fifthResult.getWatermarkHold(), equalTo(BoundedWindow.TIMESTAMP_MAX_VALUE));
     assertThat(fifthResult.getUnprocessedElements(), Matchers.emptyIterable());
   }
-
-  @Test
-  public void overrideFactoryGetInputSucceeds() {
-    DirectTestStreamFactory<?> factory = new DirectTestStreamFactory<>(runner);
-    PBegin begin = factory.getInput(Collections.<TupleTag<?>, PValue>emptyMap(), p);
-    assertThat(begin.getPipeline(), Matchers.<Pipeline>equalTo(p));
-  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewOverrideFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewOverrideFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewOverrideFactoryTest.java
index 258cb46..6875e1a 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewOverrideFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewOverrideFactoryTest.java
@@ -30,12 +30,13 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.beam.runners.direct.ViewOverrideFactory.WriteView;
 import org.apache.beam.sdk.Pipeline.PipelineVisitor;
+import org.apache.beam.sdk.runners.PTransformOverrideFactory.PTransformReplacement;
 import org.apache.beam.sdk.runners.TransformHierarchy.Node;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.View.CreatePCollectionView;
 import org.apache.beam.sdk.util.PCollectionViews;
@@ -62,9 +63,20 @@ public class ViewOverrideFactoryTest implements Serializable {
     PCollection<Integer> ints = p.apply("CreateContents", Create.of(1, 2, 3));
     final PCollectionView<List<Integer>> view =
         PCollectionViews.listView(ints, WindowingStrategy.globalDefault(), ints.getCoder());
-    PTransform<PCollection<Integer>, PCollectionView<List<Integer>>> replacementTransform =
-        factory.getReplacementTransform(CreatePCollectionView.<Integer, List<Integer>>of(view));
-    PCollectionView<List<Integer>> afterReplacement = ints.apply(replacementTransform);
+    PTransformReplacement<PCollection<Integer>, PCollectionView<List<Integer>>>
+        replacementTransform =
+            factory.getReplacementTransform(
+                AppliedPTransform
+                    .<PCollection<Integer>, PCollectionView<List<Integer>>,
+                        CreatePCollectionView<Integer, List<Integer>>>
+                        of(
+                            "foo",
+                            ints.expand(),
+                            view.expand(),
+                            CreatePCollectionView.<Integer, List<Integer>>of(view),
+                            p));
+    PCollectionView<List<Integer>> afterReplacement =
+        ints.apply(replacementTransform.getTransform());
     assertThat(
         "The CreatePCollectionView replacement should return the same View",
         afterReplacement,
@@ -92,9 +104,18 @@ public class ViewOverrideFactoryTest implements Serializable {
     final PCollection<Integer> ints = p.apply("CreateContents", Create.of(1, 2, 3));
     final PCollectionView<List<Integer>> view =
         PCollectionViews.listView(ints, WindowingStrategy.globalDefault(), ints.getCoder());
-    PTransform<PCollection<Integer>, PCollectionView<List<Integer>>> replacement =
-        factory.getReplacementTransform(CreatePCollectionView.<Integer, List<Integer>>of(view));
-    ints.apply(replacement);
+    PTransformReplacement<PCollection<Integer>, PCollectionView<List<Integer>>> replacement =
+        factory.getReplacementTransform(
+            AppliedPTransform
+                .<PCollection<Integer>, PCollectionView<List<Integer>>,
+                    CreatePCollectionView<Integer, List<Integer>>>
+                    of(
+                        "foo",
+                        ints.expand(),
+                        view.expand(),
+                        CreatePCollectionView.<Integer, List<Integer>>of(view),
+                        p));
+    ints.apply(replacement.getTransform());
     final AtomicBoolean writeViewVisited = new AtomicBoolean();
     p.traverseTopologically(
         new PipelineVisitor.Defaults() {
@@ -114,11 +135,4 @@ public class ViewOverrideFactoryTest implements Serializable {
 
     assertThat(writeViewVisited.get(), is(true));
   }
-
-  @Test
-  public void overrideFactoryGetInputSucceeds() {
-    ViewOverrideFactory<String, String> factory = new ViewOverrideFactory<>();
-    PCollection<String> input = p.apply(Create.of("foo", "bar"));
-    assertThat(factory.getInput(input.expand(), p), equalTo(input));
-  }
 }

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java
----------------------------------------------------------------------
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java
index 8720fd1..361850d 100644
--- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java
+++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java
@@ -38,11 +38,13 @@ import java.util.List;
 import java.util.UUID;
 import org.apache.beam.runners.direct.WriteWithShardingFactory.CalculateShardsFn;
 import org.apache.beam.sdk.coders.VarLongCoder;
+import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.io.Sink;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.io.Write;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFnTester;
@@ -52,7 +54,9 @@ import org.apache.beam.sdk.util.PCollectionViews;
 import org.apache.beam.sdk.util.WindowingStrategy;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.hamcrest.Matchers;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.beam.sdk.values.PValue;
+import org.apache.beam.sdk.values.TupleTag;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
@@ -118,7 +122,15 @@ public class WriteWithShardingFactoryTest {
   @Test
   public void withNoShardingSpecifiedReturnsNewTransform() {
     Write<Object> original = Write.to(new TestSink());
-    assertThat(factory.getReplacementTransform(original), not(equalTo((Object) original)));
+    PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));
+
+    AppliedPTransform<PCollection<Object>, PDone, Write<Object>> originalApplication =
+        AppliedPTransform.of(
+            "write", objs.expand(), Collections.<TupleTag<?>, PValue>emptyMap(), original, p);
+
+    assertThat(
+        factory.getReplacementTransform(originalApplication).getTransform(),
+        not(equalTo((Object) original)));
   }
 
   @Test
@@ -195,13 +207,6 @@ public class WriteWithShardingFactoryTest {
     assertThat(shards, containsInAnyOrder(13));
   }
 
-  @Test
-  public void getInputSucceeds() {
-    PCollection<String> original = p.apply(Create.of("foo"));
-    PCollection<?> input = factory.getInput(original.expand(), p);
-    assertThat(input, Matchers.<PCollection<?>>equalTo(original));
-  }
-
   private static class TestSink extends Sink<Object> {
     @Override
     public void validate(PipelineOptions options) {}

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslator.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslator.java
index 70da2b3..0459ef7 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslator.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslator.java
@@ -18,11 +18,11 @@
 package org.apache.beam.runners.flink;
 
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
 import java.util.List;
 import java.util.Map;
 import org.apache.beam.runners.core.SplittableParDo;
 import org.apache.beam.runners.core.construction.PTransformMatchers;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.ReplacementOutputs;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.sdk.Pipeline;
@@ -30,9 +30,9 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.runners.PTransformOverride;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
 import org.apache.beam.sdk.runners.TransformHierarchy;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.transforms.ParDo.MultiOutput;
 import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.util.InstanceBuilder;
@@ -221,46 +221,50 @@ class FlinkStreamingPipelineTranslator extends FlinkPipelineTranslator {
   }
 
   private static class ReflectiveOneToOneOverrideFactory<
-      InputT extends PValue,
-      OutputT extends PValue,
-      TransformT extends PTransform<InputT, OutputT>>
-      extends SingleInputOutputOverrideFactory<InputT, OutputT, TransformT> {
-    private final Class<PTransform<InputT, OutputT>> replacement;
+          InputT, OutputT, TransformT extends PTransform<PCollection<InputT>, PCollection<OutputT>>>
+      extends SingleInputOutputOverrideFactory<
+          PCollection<InputT>, PCollection<OutputT>, TransformT> {
+    private final Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement;
     private final FlinkRunner runner;
 
     private ReflectiveOneToOneOverrideFactory(
-        Class<PTransform<InputT, OutputT>> replacement, FlinkRunner runner) {
+        Class<PTransform<PCollection<InputT>, PCollection<OutputT>>> replacement,
+        FlinkRunner runner) {
       this.replacement = replacement;
       this.runner = runner;
     }
 
     @Override
-    public PTransform<InputT, OutputT> getReplacementTransform(TransformT transform) {
-      return InstanceBuilder.ofType(replacement)
-          .withArg(FlinkRunner.class, runner)
-          .withArg((Class<PTransform<InputT, OutputT>>) transform.getClass(), transform)
-          .build();
+    public PTransformReplacement<PCollection<InputT>, PCollection<OutputT>> getReplacementTransform(
+        AppliedPTransform<PCollection<InputT>, PCollection<OutputT>, TransformT> transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          InstanceBuilder.ofType(replacement)
+              .withArg(FlinkRunner.class, runner)
+              .withArg(
+                  (Class<PTransform<PCollection<InputT>, PCollection<OutputT>>>)
+                      transform.getTransform().getClass(),
+                  transform.getTransform())
+              .build());
     }
   }
 
   /**
-   * A {@link PTransformOverrideFactory} that overrides a
-   * <a href="https://s.apache.org/splittable-do-fn">Splittable DoFn</a> with
-   * {@link SplittableParDo}.
+   * A {@link PTransformOverrideFactory} that overrides a <a
+   * href="https://s.apache.org/splittable-do-fn">Splittable DoFn</a> with {@link SplittableParDo}.
    */
   static class SplittableParDoOverrideFactory<InputT, OutputT>
       implements PTransformOverrideFactory<
-            PCollection<? extends InputT>, PCollectionTuple, MultiOutput<InputT, OutputT>> {
+          PCollection<InputT>, PCollectionTuple, MultiOutput<InputT, OutputT>> {
     @Override
-    @SuppressWarnings("unchecked")
-    public PTransform<PCollection<? extends InputT>, PCollectionTuple> getReplacementTransform(
-        ParDo.MultiOutput<InputT, OutputT> transform) {
-      return new SplittableParDo(transform);
-    }
-
-    @Override
-    public PCollection<? extends InputT> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<? extends InputT>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<InputT>, PCollectionTuple>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<InputT>, PCollectionTuple, MultiOutput<InputT, OutputT>>
+                transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new SplittableParDo<>(transform.getTransform()));
     }
 
     @Override

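Editorial note on the pattern above: every migrated factory now routes through PTransformReplacements.getSingletonMainInput(transform) instead of hand-rolling Iterables.getOnlyElement over the input map. A rough sketch of what such a helper has to do, assuming AppliedPTransform exposes its expanded inputs as a Map<TupleTag<?>, PValue> (the same shape the old getInput(Map, Pipeline) signature received); the class name and message strings below are illustrative, not Beam's actual implementation:

    import java.util.Map;
    import org.apache.beam.sdk.transforms.AppliedPTransform;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.PValue;
    import org.apache.beam.sdk.values.TupleTag;

    class MainInputSketch {
      /** Returns the single main-input PCollection of the applied transform. */
      static <T> PCollection<T> getSingletonMainInput(
          AppliedPTransform<PCollection<T>, ?, ?> transform) {
        PCollection<T> mainInput = null;
        for (Map.Entry<TupleTag<?>, PValue> entry : transform.getInputs().entrySet()) {
          // The real helper also filters out side inputs before this check.
          if (entry.getValue() instanceof PCollection) {
            if (mainInput != null) {
              throw new IllegalArgumentException(
                  "Expected exactly one main input, found several");
            }
            @SuppressWarnings("unchecked")
            PCollection<T> pc = (PCollection<T>) entry.getValue();
            mainInput = pc;
          }
        }
        if (mainInput == null) {
          throw new IllegalArgumentException("Expected exactly one main input, found none");
        }
        return mainInput;
      }
    }
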
http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
index 73f3728..119c9c9 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverrides.java
@@ -19,19 +19,21 @@ package org.apache.beam.runners.dataflow;
 
 import static com.google.common.base.Preconditions.checkState;
 
-import com.google.common.collect.Iterables;
 import java.util.Map;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.ReplacementOutputs;
 import org.apache.beam.runners.dataflow.BatchViewOverrides.GroupByKeyAndSortValuesOnly;
-import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.InstantCoder;
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.runners.PTransformOverrideFactory;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.GroupByKey;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.ParDo.MultiOutput;
+import org.apache.beam.sdk.transforms.ParDo.SingleOutput;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
@@ -85,15 +87,15 @@ public class BatchStatefulParDoOverrides {
           ParDo.SingleOutput<KV<K, InputT>, OutputT>> {
 
     @Override
-    @SuppressWarnings("unchecked")
-    public PTransform<PCollection<KV<K, InputT>>, PCollection<OutputT>> getReplacementTransform(
-        ParDo.SingleOutput<KV<K, InputT>, OutputT> originalParDo) {
-      return new StatefulSingleOutputParDo<>(originalParDo);
-    }
-
-    @Override
-    public PCollection<KV<K, InputT>> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<KV<K, InputT>>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<KV<K, InputT>>, PCollection<OutputT>>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<KV<K, InputT>>, PCollection<OutputT>,
+                    SingleOutput<KV<K, InputT>, OutputT>>
+                transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new StatefulSingleOutputParDo<>(transform.getTransform()));
     }
 
     @Override
@@ -108,15 +110,15 @@ public class BatchStatefulParDoOverrides {
           PCollection<KV<K, InputT>>, PCollectionTuple, ParDo.MultiOutput<KV<K, InputT>, OutputT>> {
 
     @Override
-    @SuppressWarnings("unchecked")
-    public PTransform<PCollection<KV<K, InputT>>, PCollectionTuple> getReplacementTransform(
-        ParDo.MultiOutput<KV<K, InputT>, OutputT> originalParDo) {
-      return new StatefulMultiOutputParDo<>(originalParDo);
-    }
-
-    @Override
-    public PCollection<KV<K, InputT>> getInput(Map<TupleTag<?>, PValue> inputs, Pipeline p) {
-      return (PCollection<KV<K, InputT>>) Iterables.getOnlyElement(inputs.values());
+    public PTransformReplacement<PCollection<KV<K, InputT>>, PCollectionTuple>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<KV<K, InputT>>, PCollectionTuple,
+                    MultiOutput<KV<K, InputT>, OutputT>>
+                transform) {
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new StatefulMultiOutputParDo<>(transform.getTransform()));
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/beam/blob/f3b49605/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
----------------------------------------------------------------------
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
index ead2712..1565fd1 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/BatchViewOverrides.java
@@ -42,6 +42,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import org.apache.beam.runners.core.construction.PTransformReplacements;
 import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
 import org.apache.beam.runners.dataflow.internal.IsmFormat;
 import org.apache.beam.runners.dataflow.internal.IsmFormat.IsmRecord;
@@ -59,6 +60,7 @@ import org.apache.beam.sdk.coders.SerializableCoder;
 import org.apache.beam.sdk.coders.StandardCoder;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.coders.VarLongCoder;
+import org.apache.beam.sdk.transforms.AppliedPTransform;
 import org.apache.beam.sdk.transforms.Combine;
 import org.apache.beam.sdk.transforms.Combine.GloballyAsSingletonView;
 import org.apache.beam.sdk.transforms.CombineFnBase.GlobalCombineFn;
@@ -1404,10 +1406,17 @@ class BatchViewOverrides {
     }
 
     @Override
-    public PTransform<PCollection<ElemT>, PCollectionView<ViewT>> getReplacementTransform(
-        final GloballyAsSingletonView<ElemT, ViewT> transform) {
-      return new BatchCombineGloballyAsSingletonView<>(
-          runner, transform.getCombineFn(), transform.getFanout(), transform.getInsertDefault());
+    public PTransformReplacement<PCollection<ElemT>, PCollectionView<ViewT>>
+        getReplacementTransform(
+            AppliedPTransform<
+                    PCollection<ElemT>, PCollectionView<ViewT>,
+                    GloballyAsSingletonView<ElemT, ViewT>>
+                transform) {
+      GloballyAsSingletonView<ElemT, ViewT> combine = transform.getTransform();
+      return PTransformReplacement.of(
+          PTransformReplacements.getSingletonMainInput(transform),
+          new BatchCombineGloballyAsSingletonView<>(
+              runner, combine.getCombineFn(), combine.getFanout(), combine.getInsertDefault()));
     }
 
     private static class BatchCombineGloballyAsSingletonView<ElemT, ViewT>

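Editorial note: all three files converge on the same shape. getReplacementTransform now receives the whole AppliedPTransform, pulls the main input out of it with PTransformReplacements.getSingletonMainInput, and reads the original transform via getTransform(). A minimal self-contained sketch of a factory written against the new signature; the replacement transform here is invented for illustration, and mapOutputs is inherited from SingleInputOutputOverrideFactory:

    import org.apache.beam.runners.core.construction.PTransformReplacements;
    import org.apache.beam.runners.core.construction.SingleInputOutputOverrideFactory;
    import org.apache.beam.sdk.transforms.AppliedPTransform;
    import org.apache.beam.sdk.transforms.PTransform;
    import org.apache.beam.sdk.values.PCollection;

    class ExampleOverrideFactory<T>
        extends SingleInputOutputOverrideFactory<
            PCollection<T>, PCollection<T>, PTransform<PCollection<T>, PCollection<T>>> {

      @Override
      public PTransformReplacement<PCollection<T>, PCollection<T>> getReplacementTransform(
          AppliedPTransform<
                  PCollection<T>, PCollection<T>, PTransform<PCollection<T>, PCollection<T>>>
              transform) {
        return PTransformReplacement.of(
            // The main input comes from the AppliedPTransform itself, not from a
            // separate getInput(Map, Pipeline) callback as in the old interface.
            PTransformReplacements.getSingletonMainInput(transform),
            new ExampleReplacement<>(transform.getTransform()));
      }

      /** Hypothetical replacement; a runner would substitute its own logic in expand(). */
      private static class ExampleReplacement<T>
          extends PTransform<PCollection<T>, PCollection<T>> {
        private final PTransform<PCollection<T>, PCollection<T>> original;

        ExampleReplacement(PTransform<PCollection<T>, PCollection<T>> original) {
          this.original = original;
        }

        @Override
        public PCollection<T> expand(PCollection<T> input) {
          // This sketch simply re-applies the original transform.
          return input.apply(original);
        }
      }
    }
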

[50/50] [abbrv] beam git commit: This closes #2672: Merge branch 'master' up to commit 686b774 into jstorm-runner

Posted by ke...@apache.org.
This closes #2672: Merge branch 'master' up to commit 686b774 into jstorm-runner

  [BEAM-1993] Remove special unbounded Flink source/sink
  Remove flink-annotations dependency
  Fix Javadoc warnings on Flink Runner
  Enable flink dependency enforcement and make dependencies explicit
  [BEAM-59] Register standard FileSystems wherever we register IOChannelFactories
  [BEAM-1991] Sum.SumDoubleFn => Sum.ofDoubles
  clean up description for sdk_location
  Set the Project of a Table Reference at Runtime
  Only compile HIFIO ITs when compiling with java 8.
  Update assertions of source_test_utils from camelcase to underscore-separated.
  Add no-else return to pylintrc
  Remove getSideInputWindow
  Remove reference to the isStreaming flag
  Javadoc fixups after style guide changes
  Update Dataflow Worker Version
  [BEAM-1922] Close datasource in JdbcIO when possible
  Fix javadoc warnings
  Add javadoc to getCheckpointMark in UnboundedSource
  Removes final minor usages of OldDoFn outside OldDoFn itself
  [BEAM-1915] Removes use of OldDoFn from Apex
  Update Signature of PTransformOverrideFactory
  [BEAM-1964] Fix lint issues and pylint upgrade
  Rename DoFn.Context#sideOutput to output
  [BEAM-1964] Fix lint issues for linter upgrade -3
  [BEAM-1964] Fix lint issues for linter upgrade -2
  Avoid repackaging bigtable classes in dataflow runner.
  ApexRunner: register standard IOs when deserializing pipeline options
  Add PCollections Utilities
  Free PTransform Names if they are being Replaced
  [BEAM-1347] Update protos related to State API for prototyping purposes.
  Update java8 examples pom files to include maven-shade-plugin.
  fix the simplest typo
  [BEAM-1964] Fix lint issues for linter upgrade
  Merge PR#2423: Add Kubernetes scripts for clusters for Performance and Integration tests of Cassandra and ES for Hadoop Input Format IO
  Remove Triggers.java from SDK entirely
  [BEAM-1708] Improve error message when GCP not installed
  Improve gcloud logging message
  [BEAM-1101, BEAM-1068] Remove service account name credential pipeline options
  Update user_score.py
  Pin versions in tox script
  Improve Empty Create Default Coder Error Message
  Represent a Pipeline via a list of Top-level Transforms
  Test all Known Coders to ensure they Serialize via URN
  [BEAM-1950] Add missing 'static' keyword to MicrobatchSource#initReaderCache
  Move Triggers from sdk-core to runners-core-construction
  [BEAM-1222] Chunk size should be FS dependent
  Move HIFIO k8s scripts into shared dir
  Move jdbc's postgres k8s scripts into shared k8s dir
  Move travis/jenkins folders in a test-infra folder
  [BEAM-911] Mark IO APIs as @Experimental
  Revert "Revert "Revert "Add ValueProvider class for FileBasedSource I/O Transforms"""
  Revert "Throw specialized exception in value providers"
  Removes FlatMapElements.MissingOutputTypeDescriptor
  Removes MapElements.MissingOutputTypeDescriptor
  [BEAM-1882] Update postgres k8 scripts & add scripts for running local dev test
  [BEAM-115] Update timer/state fields on ParDoPayload to use a map field for consistent tag usage
  Use SdkComponents in WindowingStrategy.toProto
  [BEAM-1722] Move PubsubIO into the google-cloud-platform module
  Triggers: handle missing case
  Clean HFIOWithEmbeddedCassandraTest before Execution
  DataflowRunner: remove dead code
  Throw specialized exception in value providers
  DataflowRunner: send windowing strategy using Runner API proto
  DataflowRunner misc cleanups
  Improve Work Rejection handling
  Remove Orderedness of Input, Output expansions
  Ignore more python build artifacts.
  Fix build breaks caused by overlaps between b615013 and c08b7b1
  Remove Jdk1.8-tests/.toDelete
  Improve HadoopInputFormatIO DisplayData and Cassandra tests
  Add Coder utilities for Proto conversions
  Flip dependency edge between Dataflow runner and IO-GCP
  Move HashingFn to io/common, switch to better hash
  PubsubIO: remove support for BoundedReader
  Bump Dataflow worker to 20170410
  Removes DoFn.ProcessContinuation completely
  Move WindowingStrategies to runners-core-construction
  Fix GroupByKeyInputVisitor for Direct Runner
  Skip query metrics when creating a template
  Upgrade dependencies.
  Add SdkComponents
  Create as custom source
  BEAM-1053 ApexGroupByKeyOperator serialization issues
  enable test_multi_valued_singleton_side_input test
  [BEAM-386] Move UnboundedReadFromBoundedSource to core-construction-java
  BEAM-1390 Update top level README.md to include Apex Runner
  better log message for bigquery temp tables
  [BEAM-1921] Expose connection properties in JdbcIO
  [BEAM-1294] Long running UnboundedSource Readers
  [BEAM-1737] Implement a Single-output ParDo as a Multi-output ParDo with a single output.
  Fix for potentially unclosed streams in ApexYarnLauncher
  TestDataflowRunner: better error handling
  BEAM-1887 Switch Apex ParDo to new DoFn.
  Adds tests for the watermark hold (previously untested)
  Fixes SDF issues re: watermarks and stop/resume
  Clarifies doc of ProcessElement re: HasDefaultTracker
  [BEAM-65] Adds HasDefaultTracker for RestrictionTracker inference
  Cleanup: removes two unused constants
  [BEAM-1823] Improve ValidatesRunner Test Log
  Clean up in textio and tfrecordio
  ...


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/58d4b97c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/58d4b97c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/58d4b97c

Branch: refs/heads/jstorm-runner
Commit: 58d4b97c0a218d01e1b64d5fced693b15d941074
Parents: f6a89b0 f1e170a
Author: Kenneth Knowles <kl...@google.com>
Authored: Tue Apr 25 10:29:18 2017 -0700
Committer: Kenneth Knowles <kl...@google.com>
Committed: Tue Apr 25 10:29:18 2017 -0700

----------------------------------------------------------------------
 .gitignore                                      |    3 +
 .jenkins/common_job_properties.groovy           |  261 ----
 .../job_beam_PerformanceTests_Dataflow.groovy   |   43 -
 .jenkins/job_beam_PerformanceTests_JDBC.groovy  |   60 -
 .jenkins/job_beam_PerformanceTests_Spark.groovy |   44 -
 ...job_beam_PostCommit_Java_MavenInstall.groovy |   42 -
 ..._PostCommit_Java_ValidatesRunner_Apex.groovy |   48 -
 ...tCommit_Java_ValidatesRunner_Dataflow.groovy |   45 -
 ...PostCommit_Java_ValidatesRunner_Flink.groovy |   43 -
 ...tCommit_Java_ValidatesRunner_Gearpump.groovy |   49 -
 ...PostCommit_Java_ValidatesRunner_Spark.groovy |   44 -
 .../job_beam_PostCommit_Python_Verify.groovy    |   55 -
 .../job_beam_PreCommit_Java_MavenInstall.groovy |   42 -
 .../job_beam_PreCommit_Website_Stage.groovy     |   80 -
 .jenkins/job_beam_PreCommit_Website_Test.groovy |   65 -
 .../job_beam_Release_NightlySnapshot.groovy     |   45 -
 .jenkins/job_seed.groovy                        |   53 -
 .../jenkins/common_job_properties.groovy        |  261 ++++
 .../job_beam_PerformanceTests_Dataflow.groovy   |   43 +
 .../job_beam_PerformanceTests_JDBC.groovy       |   60 +
 .../job_beam_PerformanceTests_Spark.groovy      |   44 +
 ...job_beam_PostCommit_Java_MavenInstall.groovy |   42 +
 ..._PostCommit_Java_ValidatesRunner_Apex.groovy |   48 +
 ...tCommit_Java_ValidatesRunner_Dataflow.groovy |   45 +
 ...PostCommit_Java_ValidatesRunner_Flink.groovy |   43 +
 ...tCommit_Java_ValidatesRunner_Gearpump.groovy |   49 +
 ...PostCommit_Java_ValidatesRunner_Spark.groovy |   44 +
 .../job_beam_PostCommit_Python_Verify.groovy    |   55 +
 .../job_beam_PreCommit_Java_MavenInstall.groovy |   42 +
 .../job_beam_PreCommit_Website_Stage.groovy     |   80 +
 .../job_beam_PreCommit_Website_Test.groovy      |   65 +
 .../job_beam_Release_NightlySnapshot.groovy     |   45 +
 .test-infra/jenkins/job_seed.groovy             |   53 +
 .../cassandra-service-for-local-dev.yaml        |   28 +
 .../cassandra-svc-statefulset.yaml              |  114 ++
 .../LargeITCluster/cassandra-svc-temp.yaml      |   74 +
 .../cassandra/LargeITCluster/data-load.sh       |  122 ++
 .../cassandra/LargeITCluster/show_health.sh     |   47 +
 .../cassandra/LargeITCluster/start-up.sh        |   22 +
 .../cassandra/LargeITCluster/teardown.sh        |   25 +
 .../cassandra-service-for-local-dev.yaml        |   30 +
 .../SmallITCluster/cassandra-svc-rc.yaml        |   74 +
 .../cassandra/SmallITCluster/data-load.sh       |   86 +
 .../cassandra/SmallITCluster/show_health.sh     |   47 +
 .../cassandra/SmallITCluster/start-up.sh        |   23 +
 .../cassandra/SmallITCluster/teardown.sh        |   22 +
 .../kubernetes/cassandra/data-load-setup.sh     |   29 +
 .../elasticsearch-service-for-local-dev.yaml    |   33 +
 .../es-services-deployments.yaml                |  258 +++
 .../LargeProductionCluster/start-up.sh          |   22 +
 .../LargeProductionCluster/teardown.sh          |   21 +
 .../elasticsearch-service-for-local-dev.yaml    |   34 +
 .../SmallITCluster/elasticsearch-svc-rc.yaml    |   96 ++
 .../elasticsearch/SmallITCluster/start-up.sh    |   23 +
 .../elasticsearch/SmallITCluster/teardown.sh    |   21 +
 .../kubernetes/elasticsearch/data-load-setup.sh |   26 +
 .../kubernetes/elasticsearch/data-load.sh       |   33 +
 .../kubernetes/elasticsearch/es_test_data.py    |  299 ++++
 .../kubernetes/elasticsearch/show-health.sh     |   33 +
 .../postgres/postgres-service-for-local-dev.yml |   28 +
 .test-infra/kubernetes/postgres/postgres.yml    |   56 +
 .test-infra/travis/README.md                    |   23 +
 .test-infra/travis/settings.xml                 |   33 +
 .test-infra/travis/test_wordcount.sh            |  125 ++
 .travis.yml                                     |    4 +-
 .travis/README.md                               |   23 -
 .travis/settings.xml                            |   33 -
 .travis/test_wordcount.sh                       |  125 --
 README.md                                       |    1 +
 examples/java8/pom.xml                          |   34 +
 .../beam/examples/MinimalWordCountJava8.java    |    9 +-
 .../beam/examples/complete/game/GameStats.java  |    8 +-
 .../examples/complete/game/LeaderBoard.java     |    2 +-
 .../beam/examples/complete/game/UserScore.java  |    5 +-
 .../examples/MinimalWordCountJava8Test.java     |    9 +-
 .../complete/game/HourlyTeamScoreTest.java      |    5 +-
 .../examples/complete/game/UserScoreTest.java   |    6 +-
 .../apache/beam/runners/apex/ApexRunner.java    |   34 +-
 .../beam/runners/apex/ApexYarnLauncher.java     |  111 +-
 .../translation/ApexPipelineTranslator.java     |    2 +-
 .../FlattenPCollectionTranslator.java           |   13 +-
 .../apex/translation/GroupByKeyTranslator.java  |    4 +-
 .../apex/translation/ParDoTranslator.java       |   30 +-
 .../apex/translation/TranslationContext.java    |   22 +-
 .../translation/WindowAssignTranslator.java     |   58 +-
 .../operators/ApexGroupByKeyOperator.java       |  273 +---
 .../operators/ApexParDoOperator.java            |  235 ++-
 .../operators/ApexProcessFnOperator.java        |  184 +++
 .../translation/utils/ApexStateInternals.java   |   73 +-
 .../apex/translation/utils/NoOpStepContext.java |    2 +-
 .../utils/SerializablePipelineOptions.java      |   13 +-
 .../translation/utils/StateInternalsProxy.java  |   67 +
 .../translation/ApexGroupByKeyOperatorTest.java |    2 +-
 .../apex/translation/ParDoTranslatorTest.java   |   20 +-
 .../utils/ApexStateInternalsTest.java           |   25 +-
 runners/core-construction-java/pom.xml          |   36 +
 .../beam/runners/core/construction/Coders.java  |  174 +++
 .../DeduplicatedFlattenFactory.java             |   79 +-
 .../EmptyFlattenAsCreateFactory.java            |   25 +-
 .../runners/core/construction/PCollections.java |   97 ++
 .../core/construction/PTransformMatchers.java   |    7 +-
 .../construction/PTransformReplacements.java    |   69 +
 .../core/construction/PrimitiveCreate.java      |   18 +-
 .../core/construction/ReplacementOutputs.java   |   63 +-
 .../core/construction/SdkComponents.java        |  159 ++
 .../SingleInputOutputOverrideFactory.java       |   14 +-
 .../runners/core/construction/Triggers.java     |  336 ++++
 .../UnboundedReadFromBoundedSource.java         |  542 +++++++
 .../UnsupportedOverrideFactory.java             |   20 +-
 .../core/construction/WindowingStrategies.java  |  245 +++
 .../runners/core/construction/CodersTest.java   |  163 ++
 .../DeduplicatedFlattenFactoryTest.java         |   24 +-
 .../EmptyFlattenAsCreateFactoryTest.java        |   42 +-
 .../core/construction/PCollectionsTest.java     |  188 +++
 .../construction/PTransformMatchersTest.java    |  132 +-
 .../PTransformReplacementsTest.java             |  131 ++
 .../construction/ReplacementOutputsTest.java    |  109 +-
 .../core/construction/SdkComponentsTest.java    |  157 ++
 .../SingleInputOutputOverrideFactoryTest.java   |   37 +-
 .../runners/core/construction/TriggersTest.java |  111 ++
 .../UnboundedReadFromBoundedSourceTest.java     |  373 +++++
 .../UnsupportedOverrideFactoryTest.java         |   16 +-
 .../construction/WindowingStrategiesTest.java   |  110 ++
 runners/core-java/pom.xml                       |   10 +-
 .../beam/runners/core/AssignWindowsDoFn.java    |   78 -
 .../beam/runners/core/BaseExecutionContext.java |   13 +-
 .../apache/beam/runners/core/DoFnAdapters.java  |  323 ----
 .../apache/beam/runners/core/DoFnRunners.java   |   10 +-
 .../beam/runners/core/ExecutionContext.java     |   13 +-
 .../GroupAlsoByWindowViaOutputBufferDoFn.java   |   19 +-
 .../core/GroupAlsoByWindowViaWindowSetDoFn.java |    9 +-
 .../GroupAlsoByWindowViaWindowSetNewDoFn.java   |   19 +-
 .../core/GroupAlsoByWindowsAggregators.java     |   28 +
 .../runners/core/GroupAlsoByWindowsDoFn.java    |   46 -
 .../core/LateDataDroppingDoFnRunner.java        |    3 +-
 .../org/apache/beam/runners/core/OldDoFn.java   |   41 +-
 ...eBoundedSplittableProcessElementInvoker.java |  133 +-
 .../beam/runners/core/OutputWindowedValue.java  |   10 +-
 .../beam/runners/core/SimpleDoFnRunner.java     |   59 +-
 .../beam/runners/core/SimpleOldDoFnRunner.java  |   63 +-
 .../beam/runners/core/SplittableParDo.java      |   36 +-
 .../core/SplittableProcessElementInvoker.java   |   22 +-
 .../core/UnboundedReadFromBoundedSource.java    |  542 -------
 .../beam/runners/core/WindowingInternals.java   |    8 +-
 .../core/WindowingInternalsAdapters.java        |    8 +-
 .../triggers/AfterWatermarkStateMachine.java    |   14 +-
 ...roupAlsoByWindowViaOutputBufferDoFnTest.java |    4 +-
 .../core/GroupAlsoByWindowsProperties.java      |   37 +-
 .../apache/beam/runners/core/NoOpOldDoFn.java   |    4 +-
 .../apache/beam/runners/core/OldDoFnTest.java   |    4 +-
 ...ndedSplittableProcessElementInvokerTest.java |   27 +-
 .../beam/runners/core/ReduceFnTester.java       |   13 +-
 .../runners/core/SimpleOldDoFnRunnerTest.java   |    4 +-
 .../beam/runners/core/SplittableParDoTest.java  |  238 +--
 .../UnboundedReadFromBoundedSourceTest.java     |  373 -----
 .../direct/BoundedReadEvaluatorFactory.java     |    2 +-
 ...ectGBKIntoKeyedWorkItemsOverrideFactory.java |   16 +-
 .../beam/runners/direct/DirectGraphVisitor.java |    5 +-
 .../direct/DirectGroupByKeyOverrideFactory.java |   14 +-
 .../direct/ExecutorServiceParallelExecutor.java |    4 +-
 .../runners/direct/FlattenEvaluatorFactory.java |    2 +-
 .../GroupAlsoByWindowEvaluatorFactory.java      |   18 +-
 .../direct/GroupByKeyOnlyEvaluatorFactory.java  |    4 +-
 .../direct/KeyedPValueTrackingVisitor.java      |   14 +-
 .../beam/runners/direct/ModelEnforcement.java   |   13 +-
 .../beam/runners/direct/ParDoEvaluator.java     |    4 +-
 .../runners/direct/ParDoEvaluatorFactory.java   |   20 +-
 .../direct/ParDoMultiOverrideFactory.java       |   29 +-
 ...littableProcessElementsEvaluatorFactory.java |    8 +-
 .../direct/StatefulParDoEvaluatorFactory.java   |   10 +-
 .../direct/TestStreamEvaluatorFactory.java      |   20 +-
 .../direct/TransformEvaluatorFactory.java       |   10 +-
 .../direct/TransformExecutorServices.java       |   37 +-
 .../direct/UnboundedReadEvaluatorFactory.java   |    4 +-
 .../runners/direct/ViewEvaluatorFactory.java    |    4 +-
 .../runners/direct/ViewOverrideFactory.java     |   23 +-
 .../beam/runners/direct/WatermarkManager.java   |   19 +-
 .../runners/direct/WindowEvaluatorFactory.java  |    2 +-
 .../direct/WriteWithShardingFactory.java        |   22 +-
 .../runners/direct/DirectGraphVisitorTest.java  |    7 +-
 .../DirectGroupByKeyOverrideFactoryTest.java    |   12 +-
 .../beam/runners/direct/DirectRunnerTest.java   |    5 +-
 .../beam/runners/direct/ParDoEvaluatorTest.java |    8 +-
 .../direct/ParDoMultiOverrideFactoryTest.java   |   45 -
 .../StatefulParDoEvaluatorFactoryTest.java      |    2 +-
 .../direct/TestStreamEvaluatorFactoryTest.java  |   11 -
 .../direct/TransformExecutorServicesTest.java   |   48 +
 .../runners/direct/ViewOverrideFactoryTest.java |   44 +-
 .../direct/WriteWithShardingFactoryTest.java    |   23 +-
 .../examples/streaming/KafkaIOExamples.java     |  338 ----
 .../KafkaWindowedWordCountExample.java          |  164 --
 runners/flink/pom.xml                           |   45 +-
 runners/flink/runner/pom.xml                    |  109 +-
 .../flink/FlinkBatchTransformTranslators.java   |   36 +-
 .../flink/FlinkBatchTranslationContext.java     |   11 +-
 .../flink/FlinkDetachedRunnerResult.java        |    3 +-
 .../flink/FlinkStreamingPipelineTranslator.java |   61 +-
 .../FlinkStreamingTransformTranslators.java     |  139 +-
 .../flink/FlinkStreamingTranslationContext.java |   12 +-
 .../functions/FlinkDoFnFunction.java            |    4 +-
 .../functions/FlinkNoOpStepContext.java         |    2 +-
 .../functions/FlinkStatefulDoFnFunction.java    |    4 +-
 .../types/EncodedValueTypeInformation.java      |    9 -
 .../flink/translation/types/FlinkCoder.java     |   63 -
 .../utils/SerializedPipelineOptions.java        |    2 +
 .../wrappers/streaming/DoFnOperator.java        |   14 +-
 .../streaming/SingletonKeyedWorkItem.java       |    2 -
 .../streaming/SingletonKeyedWorkItemCoder.java  |    4 +-
 .../streaming/SplittableDoFnOperator.java       |   10 +-
 .../wrappers/streaming/WindowDoFnOperator.java  |    7 +-
 .../streaming/io/UnboundedFlinkSink.java        |  200 ---
 .../streaming/io/UnboundedFlinkSource.java      |  120 --
 .../beam/runners/flink/PipelineOptionsTest.java |    2 +-
 .../flink/streaming/DoFnOperatorTest.java       |   34 +-
 runners/google-cloud-dataflow-java/pom.xml      |   25 +-
 .../dataflow/BatchStatefulParDoOverrides.java   |   51 +-
 .../runners/dataflow/BatchViewOverrides.java    |   23 +-
 .../dataflow/DataflowPipelineTranslator.java    |   37 +-
 .../beam/runners/dataflow/DataflowRunner.java   |  146 +-
 .../DataflowUnboundedReadFromBoundedSource.java |  547 -------
 .../dataflow/PrimitiveParDoSingleFactory.java   |   15 +-
 .../dataflow/ReshuffleOverrideFactory.java      |   12 +-
 .../dataflow/StreamingViewOverrides.java        |   14 +-
 .../runners/dataflow/TransformTranslator.java   |    6 +-
 .../dataflow/testing/TestDataflowRunner.java    |   34 +-
 .../dataflow/BatchViewOverridesTest.java        |    4 +-
 .../dataflow/DataflowPipelineJobTest.java       |    7 +-
 ...aflowUnboundedReadFromBoundedSourceTest.java |   79 -
 .../PrimitiveParDoSingleFactoryTest.java        |   59 +-
 .../testing/TestDataflowRunnerTest.java         |    3 +-
 .../apache/beam/runners/spark/SparkRunner.java  |   20 +-
 .../beam/runners/spark/TestSparkRunner.java     |   21 +-
 .../beam/runners/spark/io/MicrobatchSource.java |  113 +-
 .../beam/runners/spark/io/SourceDStream.java    |   11 +-
 .../SparkGroupAlsoByWindowViaWindowSet.java     |   22 +-
 .../spark/stateful/StateSpecFunctions.java      |    6 +-
 .../runners/spark/translation/DoFnFunction.java |  130 --
 .../spark/translation/EvaluationContext.java    |   11 +-
 .../spark/translation/MultiDoFnFunction.java    |    4 +-
 .../spark/translation/SparkAssignWindowFn.java  |    3 +-
 ...SparkGroupAlsoByWindowViaOutputBufferFn.java |   18 +-
 .../spark/translation/SparkProcessContext.java  |    2 +-
 .../spark/translation/SparkRuntimeContext.java  |    2 +
 .../spark/translation/TransformTranslator.java  |   93 +-
 .../streaming/StreamingTransformTranslator.java |   96 +-
 .../spark/util/SparkSideInputReader.java        |    3 +-
 .../ResumeFromCheckpointStreamingTest.java      |   14 +-
 sdks/common/fn-api/pom.xml                      |    5 -
 .../fn-api/src/main/proto/beam_fn_api.proto     |  174 +--
 .../src/main/proto/beam_runner_api.proto        |   14 +-
 sdks/java/core/pom.xml                          |   44 -
 .../main/java/org/apache/beam/sdk/Pipeline.java |   39 +-
 .../java/org/apache/beam/sdk/io/AvroIO.java     |   13 +-
 .../java/org/apache/beam/sdk/io/PubsubIO.java   | 1195 --------------
 .../apache/beam/sdk/io/PubsubUnboundedSink.java |  494 ------
 .../beam/sdk/io/PubsubUnboundedSource.java      | 1463 ------------------
 .../main/java/org/apache/beam/sdk/io/Sink.java  |    2 +-
 .../org/apache/beam/sdk/io/UnboundedSource.java |   13 +
 .../beam/sdk/options/PipelineOptions.java       |   12 +-
 .../sdk/runners/PTransformOverrideFactory.java  |   31 +-
 .../apache/beam/sdk/runners/PipelineRunner.java |    7 +-
 .../beam/sdk/runners/TransformHierarchy.java    |   80 +-
 .../apache/beam/sdk/testing/TestPipeline.java   |    2 +
 .../beam/sdk/transforms/AppliedPTransform.java  |   24 +-
 .../org/apache/beam/sdk/transforms/Combine.java |    4 +-
 .../org/apache/beam/sdk/transforms/Create.java  |    7 +-
 .../org/apache/beam/sdk/transforms/DoFn.java    |   96 +-
 .../apache/beam/sdk/transforms/DoFnTester.java  |   46 +-
 .../beam/sdk/transforms/FlatMapElements.java    |  113 +-
 .../apache/beam/sdk/transforms/GroupByKey.java  |    4 +-
 .../apache/beam/sdk/transforms/MapElements.java |   99 +-
 .../org/apache/beam/sdk/transforms/ParDo.java   |   82 +-
 .../apache/beam/sdk/transforms/Partition.java   |    2 +-
 .../org/apache/beam/sdk/transforms/Sample.java  |    4 +
 .../transforms/join/KeyedPCollectionTuple.java  |   12 +-
 .../reflect/ByteBuddyDoFnInvokerFactory.java    |   47 +-
 .../sdk/transforms/reflect/DoFnInvoker.java     |    4 +-
 .../sdk/transforms/reflect/DoFnSignature.java   |   10 +-
 .../sdk/transforms/reflect/DoFnSignatures.java  |   96 +-
 .../splittabledofn/HasDefaultTracker.java       |   30 +
 .../transforms/splittabledofn/OffsetRange.java  |    8 +-
 .../splittabledofn/OffsetRangeTracker.java      |   33 +-
 .../splittabledofn/RestrictionTracker.java      |    8 +
 .../transforms/windowing/AfterWatermark.java    |   14 +-
 .../beam/sdk/transforms/windowing/Triggers.java |  320 ----
 .../beam/sdk/transforms/windowing/WindowFn.java |   13 -
 .../org/apache/beam/sdk/util/PubsubClient.java  |  544 -------
 .../apache/beam/sdk/util/PubsubGrpcClient.java  |  424 -----
 .../apache/beam/sdk/util/PubsubJsonClient.java  |  317 ----
 .../apache/beam/sdk/util/PubsubTestClient.java  |  436 ------
 .../org/apache/beam/sdk/util/Transport.java     |    3 +-
 .../beam/sdk/util/WindowingStrategies.java      |  267 ----
 .../java/org/apache/beam/sdk/values/PBegin.java |    6 +-
 .../apache/beam/sdk/values/PCollectionList.java |   27 +-
 .../beam/sdk/values/PCollectionTuple.java       |   13 +-
 .../java/org/apache/beam/sdk/values/PDone.java  |    6 +-
 .../java/org/apache/beam/sdk/values/PInput.java |    4 +-
 .../org/apache/beam/sdk/values/POutput.java     |    4 +-
 .../java/org/apache/beam/sdk/values/PValue.java |    4 +-
 .../org/apache/beam/sdk/values/PValueBase.java  |    6 +-
 .../apache/beam/sdk/values/TaggedPValue.java    |    5 +
 .../org/apache/beam/sdk/values/TupleTag.java    |   26 +-
 .../apache/beam/sdk/values/TupleTagList.java    |    2 +-
 .../apache/beam/sdk/values/TypeDescriptors.java |   25 +-
 .../org/apache/beam/sdk/values/TypedPValue.java |    4 +-
 .../java/org/apache/beam/sdk/PipelineTest.java  |  108 +-
 .../org/apache/beam/sdk/io/PubsubIOTest.java    |  197 ---
 .../beam/sdk/io/PubsubUnboundedSinkTest.java    |  190 ---
 .../beam/sdk/io/PubsubUnboundedSourceTest.java  |  411 -----
 .../apache/beam/sdk/metrics/MetricsTest.java    |    2 +-
 .../sdk/runners/TransformHierarchyTest.java     |   72 +-
 .../beam/sdk/testing/StaticWindowsTest.java     |   10 +-
 .../apache/beam/sdk/transforms/CreateTest.java  |    8 +-
 .../beam/sdk/transforms/MapElementsTest.java    |   25 +-
 .../apache/beam/sdk/transforms/ParDoTest.java   |  293 ++--
 .../beam/sdk/transforms/SplittableDoFnTest.java |   58 +-
 .../transforms/reflect/DoFnInvokersTest.java    |  153 +-
 .../DoFnSignaturesProcessElementTest.java       |    2 +-
 .../DoFnSignaturesSplittableDoFnTest.java       |  117 +-
 .../splittabledofn/OffsetRangeTrackerTest.java  |   49 +-
 .../sdk/transforms/windowing/TriggersTest.java  |  100 --
 .../apache/beam/sdk/util/PubsubClientTest.java  |  189 ---
 .../beam/sdk/util/PubsubGrpcClientTest.java     |  207 ---
 .../beam/sdk/util/PubsubJsonClientTest.java     |  140 --
 .../beam/sdk/util/PubsubTestClientTest.java     |  114 --
 .../beam/sdk/util/WindowingStrategiesTest.java  |   91 --
 .../beam/sdk/values/PCollectionListTest.java    |   70 +-
 .../beam/sdk/values/PCollectionTupleTest.java   |   13 +-
 .../apache/beam/sdk/values/TypedPValueTest.java |   46 +-
 .../beam/fn/harness/fake/FakeStepContext.java   |    2 +-
 .../control/ProcessBundleHandlerTest.java       |   30 +-
 sdks/java/io/common/pom.xml                     |    4 +
 .../apache/beam/sdk/io/common/HashingFn.java    |  109 ++
 .../sdk/io/elasticsearch/ElasticsearchIO.java   |    2 +
 sdks/java/io/google-cloud-platform/pom.xml      |   75 +-
 .../sdk/io/gcp/bigquery/BatchLoadBigQuery.java  |    7 +-
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    |   59 +-
 .../io/gcp/bigquery/BigQueryTableSource.java    |   30 +-
 .../sdk/io/gcp/bigquery/WritePartition.java     |    6 +-
 .../beam/sdk/io/gcp/bigquery/WriteResult.java   |   11 +-
 .../beam/sdk/io/gcp/pubsub/PubsubClient.java    |  544 +++++++
 .../sdk/io/gcp/pubsub/PubsubGrpcClient.java     |  424 +++++
 .../apache/beam/sdk/io/gcp/pubsub/PubsubIO.java | 1014 ++++++++++++
 .../sdk/io/gcp/pubsub/PubsubJsonClient.java     |  319 ++++
 .../sdk/io/gcp/pubsub/PubsubTestClient.java     |  436 ++++++
 .../sdk/io/gcp/pubsub/PubsubUnboundedSink.java  |  490 ++++++
 .../io/gcp/pubsub/PubsubUnboundedSource.java    | 1463 ++++++++++++++++++
 .../beam/sdk/io/gcp/pubsub/package-info.java    |   24 +
 .../beam/sdk/io/gcp/GcpApiSurfaceTest.java      |    5 +-
 .../sdk/io/gcp/bigquery/BigQueryIOTest.java     |    4 +-
 .../sdk/io/gcp/pubsub/PubsubClientTest.java     |  189 +++
 .../sdk/io/gcp/pubsub/PubsubGrpcClientTest.java |  208 +++
 .../beam/sdk/io/gcp/pubsub/PubsubIOTest.java    |  189 +++
 .../sdk/io/gcp/pubsub/PubsubJsonClientTest.java |  139 ++
 .../sdk/io/gcp/pubsub/PubsubTestClientTest.java |  114 ++
 .../io/gcp/pubsub/PubsubUnboundedSinkTest.java  |  188 +++
 .../gcp/pubsub/PubsubUnboundedSourceTest.java   |  409 +++++
 .../hadoop/inputformat/HadoopInputFormatIO.java |   36 +-
 .../inputformat/HadoopInputFormatIOTest.java    |   99 +-
 sdks/java/io/hadoop/jdk1.8-tests/pom.xml        |   46 +-
 .../inputformat/HIFIOWithElasticTest.java       |    6 +-
 .../HIFIOWithEmbeddedCassandraTest.java         |  215 +++
 .../hadoop/inputformat/hashing/HashingFn.java   |  109 --
 .../integration/tests/HIFIOCassandraIT.java     |    8 +-
 .../integration/tests/HIFIOElasticIT.java       |    6 +-
 .../SmallITCluster/cassandra-svc-rc.yaml        |   88 --
 .../cassandra/SmallITCluster/start-up.sh        |   21 -
 .../cassandra/SmallITCluster/teardown.sh        |   21 -
 .../kubernetes/cassandra/data-load-setup.sh     |   29 -
 .../resources/kubernetes/cassandra/data-load.sh |   67 -
 .../LargeProductionCluster/es-services.yaml     |  277 ----
 .../LargeProductionCluster/start-up.sh          |   21 -
 .../LargeProductionCluster/teardown.sh          |   20 -
 .../SmallITCluster/elasticsearch-svc-rc.yaml    |   84 -
 .../elasticsearch/SmallITCluster/start-up.sh    |   22 -
 .../elasticsearch/SmallITCluster/teardown.sh    |   20 -
 .../kubernetes/elasticsearch/data-load-setup.sh |   26 -
 .../kubernetes/elasticsearch/data-load.sh       |   33 -
 .../kubernetes/elasticsearch/es_test_data.py    |  299 ----
 .../kubernetes/elasticsearch/show-health.sh     |   25 -
 sdks/java/io/hadoop/pom.xml                     |   12 +-
 .../apache/beam/sdk/io/hdfs/HDFSFileSink.java   |    2 +
 .../apache/beam/sdk/io/hdfs/HDFSFileSource.java |    2 +
 .../org/apache/beam/sdk/io/jdbc/JdbcIO.java     |   61 +-
 .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java |   10 +-
 .../kubernetes/postgres-pod-no-vol.yml          |   32 -
 .../kubernetes/postgres-service-public.yml      |   28 -
 .../jdbc/src/test/resources/kubernetes/setup.sh |   20 -
 .../src/test/resources/kubernetes/teardown.sh   |   20 -
 .../java/org/apache/beam/sdk/io/jms/JmsIO.java  |    2 +
 .../org/apache/beam/sdk/io/kafka/KafkaIO.java   |    2 +
 .../apache/beam/sdk/io/kinesis/KinesisIO.java   |    2 +
 .../beam/sdk/io/mongodb/MongoDbGridFSIO.java    |    9 +-
 .../apache/beam/sdk/io/mongodb/MongoDbIO.java   |    2 +
 .../org/apache/beam/sdk/io/mqtt/MqttIO.java     |    2 +
 .../transforms/FlatMapElementsJava8Test.java    |   10 +-
 .../sdk/transforms/MapElementsJava8Test.java    |   10 +-
 sdks/python/.pylintrc                           |    6 +-
 sdks/python/apache_beam/coders/coder_impl.py    |   50 +-
 sdks/python/apache_beam/coders/coders.py        |    6 +-
 .../examples/complete/game/user_score.py        |    8 +-
 .../examples/complete/top_wikipedia_sessions.py |    8 -
 .../examples/cookbook/group_with_coder.py       |    6 +-
 .../examples/snippets/snippets_test.py          |    3 +-
 sdks/python/apache_beam/examples/wordcount.py   |   48 +-
 .../apache_beam/examples/wordcount_debugging.py |    4 -
 sdks/python/apache_beam/internal/gcp/auth.py    |   80 +-
 .../apache_beam/internal/gcp/auth_test.py       |   44 -
 .../apache_beam/internal/gcp/json_value.py      |    6 -
 sdks/python/apache_beam/internal/pickler.py     |   20 +-
 sdks/python/apache_beam/io/avroio_test.py       |   14 +-
 sdks/python/apache_beam/io/concat_source.py     |   74 +-
 .../python/apache_beam/io/concat_source_test.py |   12 +-
 sdks/python/apache_beam/io/filebasedsource.py   |   54 +-
 .../apache_beam/io/filebasedsource_test.py      |   26 +-
 sdks/python/apache_beam/io/fileio.py            |   72 +-
 sdks/python/apache_beam/io/fileio_test.py       |   45 +-
 sdks/python/apache_beam/io/filesystem.py        |    3 +-
 sdks/python/apache_beam/io/filesystem_test.py   |  242 ++-
 sdks/python/apache_beam/io/filesystems_util.py  |   10 +-
 sdks/python/apache_beam/io/gcp/bigquery.py      |   38 +-
 .../io/gcp/datastore/v1/datastoreio.py          |   24 +-
 .../io/gcp/datastore/v1/datastoreio_test.py     |    4 +-
 .../apache_beam/io/gcp/datastore/v1/helper.py   |   16 +-
 .../io/gcp/datastore/v1/query_splitter.py       |    2 +-
 sdks/python/apache_beam/io/gcp/gcsfilesystem.py |    7 +-
 .../io/gcp/tests/bigquery_matcher.py            |    3 +-
 sdks/python/apache_beam/io/iobase.py            |    7 +-
 sdks/python/apache_beam/io/localfilesystem.py   |    3 +-
 sdks/python/apache_beam/io/range_trackers.py    |   19 +-
 sdks/python/apache_beam/io/source_test_utils.py |   79 +-
 .../apache_beam/io/source_test_utils_test.py    |   20 +-
 sdks/python/apache_beam/io/textio.py            |   16 +-
 sdks/python/apache_beam/io/textio_test.py       |   18 +-
 sdks/python/apache_beam/io/tfrecordio.py        |   12 +-
 sdks/python/apache_beam/metrics/cells.py        |   28 +-
 sdks/python/apache_beam/metrics/execution.py    |    3 +-
 sdks/python/apache_beam/metrics/metric.py       |    9 +-
 sdks/python/apache_beam/pipeline.py             |    4 +-
 sdks/python/apache_beam/pipeline_test.py        |   13 +-
 sdks/python/apache_beam/pvalue.py               |   16 +-
 sdks/python/apache_beam/runners/common.py       |    9 +-
 .../runners/dataflow/dataflow_metrics_test.py   |    3 +-
 .../runners/dataflow/dataflow_runner.py         |   39 +-
 .../runners/dataflow/dataflow_runner_test.py    |    2 +-
 .../runners/dataflow/internal/apiclient.py      |    7 +-
 .../runners/dataflow/internal/dependency.py     |    6 +-
 .../runners/dataflow/native_io/iobase_test.py   |    2 +-
 .../runners/dataflow/test_dataflow_runner.py    |   14 +-
 .../runners/direct/bundle_factory.py            |   14 +-
 .../consumer_tracking_pipeline_visitor_test.py  |   22 +-
 .../apache_beam/runners/direct/direct_runner.py |   27 +-
 .../runners/direct/evaluation_context.py        |   10 +-
 .../apache_beam/runners/direct/executor.py      |    9 +-
 .../runners/direct/transform_evaluator.py       |   38 -
 sdks/python/apache_beam/runners/runner.py       |   80 +-
 sdks/python/apache_beam/runners/runner_test.py  |   41 +
 .../apache_beam/tests/pipeline_verifiers.py     |    7 +-
 sdks/python/apache_beam/transforms/combiners.py |   62 +-
 .../apache_beam/transforms/combiners_test.py    |    4 +-
 sdks/python/apache_beam/transforms/core.py      |  192 ++-
 .../apache_beam/transforms/create_test.py       |  121 ++
 sdks/python/apache_beam/transforms/display.py   |    4 +-
 .../apache_beam/transforms/display_test.py      |   36 -
 .../python/apache_beam/transforms/ptransform.py |   38 +-
 .../apache_beam/transforms/ptransform_test.py   |   17 +-
 .../python/apache_beam/transforms/sideinputs.py |   11 +-
 .../apache_beam/transforms/sideinputs_test.py   |    6 +-
 sdks/python/apache_beam/transforms/trigger.py   |   26 +-
 .../apache_beam/transforms/trigger_test.py      |    6 +-
 sdks/python/apache_beam/typehints/decorators.py |   26 +-
 .../apache_beam/typehints/trivial_inference.py  |   26 +-
 .../typehints/trivial_inference_test.py         |    3 +-
 sdks/python/apache_beam/typehints/typecheck.py  |    7 +-
 sdks/python/apache_beam/typehints/typehints.py  |   66 +-
 .../apache_beam/typehints/typehints_test.py     |    7 +-
 .../apache_beam/utils/annotations_test.py       |    2 +-
 sdks/python/apache_beam/utils/path.py           |    3 +-
 .../apache_beam/utils/pipeline_options.py       |  109 +-
 .../apache_beam/utils/pipeline_options_test.py  |   52 +-
 sdks/python/apache_beam/utils/proto_utils.py    |   15 +-
 sdks/python/apache_beam/utils/retry.py          |   14 +-
 sdks/python/apache_beam/utils/timestamp.py      |    6 +-
 sdks/python/apache_beam/utils/value_provider.py |  110 --
 .../apache_beam/utils/value_provider_test.py    |  165 --
 sdks/python/apache_beam/utils/windowed_value.py |   17 +-
 sdks/python/run_postcommit.sh                   |    6 +-
 sdks/python/run_pylint.sh                       |    2 +-
 sdks/python/setup.py                            |    8 +-
 sdks/python/tox.ini                             |   18 +-
 490 files changed, 17307 insertions(+), 17743 deletions(-)
----------------------------------------------------------------------



[43/50] [abbrv] beam git commit: Fix Javadoc warnings on Flink Runner

Posted by ke...@apache.org.
Fix Javadoc warnings on Flink Runner


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/5fce8d2a
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/5fce8d2a
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/5fce8d2a

Branch: refs/heads/jstorm-runner
Commit: 5fce8d2a27e4507012e8e61a1487f51d3293d9d9
Parents: f654ff4
Author: Ismaël Mejía <ie...@apache.org>
Authored: Tue Apr 18 14:36:44 2017 +0200
Committer: Ismaël Mejía <ie...@apache.org>
Committed: Tue Apr 18 16:12:47 2017 +0200

----------------------------------------------------------------------
 .../translation/wrappers/streaming/SingletonKeyedWorkItem.java    | 2 --
 .../wrappers/streaming/SingletonKeyedWorkItemCoder.java           | 2 --
 .../flink/translation/wrappers/streaming/WindowDoFnOperator.java  | 3 ---
 3 files changed, 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/5fce8d2a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java
index b85efef..e843660 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java
@@ -24,8 +24,6 @@ import org.apache.beam.sdk.util.WindowedValue;
 
 /**
  * Singleton keyed work item.
- * @param <K>
- * @param <ElemT>
  */
 public class SingletonKeyedWorkItem<K, ElemT> implements KeyedWorkItem<K, ElemT> {
 

http://git-wip-us.apache.org/repos/asf/beam/blob/5fce8d2a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java
index fe96eb1..9a52330 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java
@@ -37,8 +37,6 @@ import org.apache.beam.sdk.util.WindowedValue;
 
 /**
  * Singleton keyed work item coder.
- * @param <K>
- * @param <ElemT>
  */
 public class SingletonKeyedWorkItemCoder<K, ElemT>
     extends StandardCoder<SingletonKeyedWorkItem<K, ElemT>> {

http://git-wip-us.apache.org/repos/asf/beam/blob/5fce8d2a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
index 8bbc6ef..7b899f4 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java
@@ -44,9 +44,6 @@ import org.apache.flink.streaming.api.operators.InternalTimer;
 
 /**
  * Flink operator for executing window {@link DoFn DoFns}.
- *
- * @param <InputT>
- * @param <OutputT>
  */
 public class WindowDoFnOperator<K, InputT, OutputT>
     extends DoFnOperator<KeyedWorkItem<K, InputT>, KV<K, OutputT>, WindowedValue<KV<K, OutputT>>> {

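Editorial note: all three warnings in this commit come from the same javadoc rule. A @param tag with no description (here, bare "@param <K>" and similar) is flagged by the javadoc tool, typically as a "no description for @param" warning under JDK 8's doclint. The fix is to either describe the type parameter or drop the empty tag, which is what this commit does. A small sketch of both options, with invented class names:

    /**
     * Before: an empty tag draws a javadoc warning.
     *
     * @param <K>
     */
    class BeforeFix<K> {}

    /**
     * Option 1: actually describe the type parameter.
     *
     * @param <K> the type of the key
     */
    class DescribedFix<K> {}

    /** Option 2, what this commit does: omit the empty tags entirely. */
    class OmittedFix<K> {}
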

[31/50] [abbrv] beam git commit: Update assertions of source_test_utils from camelcase to underscore-separated.

Posted by ke...@apache.org.
Update assertions of source_test_utils from camelcase to underscore-separated.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/1f66fbdc
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/1f66fbdc
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/1f66fbdc

Branch: refs/heads/jstorm-runner
Commit: 1f66fbdce2187008c9e0ab535f0de3d69146d48b
Parents: 85cfd0c
Author: chamikara@google.com <ch...@google.com>
Authored: Thu Apr 13 18:57:04 2017 -0700
Committer: chamikara@google.com <ch...@google.com>
Committed: Mon Apr 17 15:01:21 2017 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/io/avroio_test.py       | 14 ++--
 .../python/apache_beam/io/concat_source_test.py | 12 ++--
 sdks/python/apache_beam/io/source_test_utils.py | 72 ++++++++++----------
 .../apache_beam/io/source_test_utils_test.py    | 20 +++---
 sdks/python/apache_beam/io/textio_test.py       | 18 ++---
 .../apache_beam/transforms/create_test.py       | 18 ++---
 6 files changed, 76 insertions(+), 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/io/avroio_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/avroio_test.py b/sdks/python/apache_beam/io/avroio_test.py
index 8b14443..5f2db62 100644
--- a/sdks/python/apache_beam/io/avroio_test.py
+++ b/sdks/python/apache_beam/io/avroio_test.py
@@ -142,10 +142,10 @@ class TestAvro(unittest.TestCase):
           (split.source, split.start_position, split.stop_position)
           for split in splits
       ]
-      source_test_utils.assertSourcesEqualReferenceSource((source, None, None),
-                                                          sources_info)
+      source_test_utils.assert_sources_equal_reference_source(
+          (source, None, None), sources_info)
     else:
-      read_records = source_test_utils.readFromSource(source, None, None)
+      read_records = source_test_utils.read_from_source(source, None, None)
       self.assertItemsEqual(expected_result, read_records)
 
   def test_read_without_splitting(self):
@@ -228,7 +228,7 @@ class TestAvro(unittest.TestCase):
   def test_read_reentrant_without_splitting(self):
     file_name = self._write_data()
     source = AvroSource(file_name)
-    source_test_utils.assertReentrantReadsSucceed((source, None, None))
+    source_test_utils.assert_reentrant_reads_succeed((source, None, None))
 
   def test_read_reantrant_with_splitting(self):
     file_name = self._write_data()
@@ -236,7 +236,7 @@ class TestAvro(unittest.TestCase):
     splits = [
         split for split in source.split(desired_bundle_size=100000)]
     assert len(splits) == 1
-    source_test_utils.assertReentrantReadsSucceed(
+    source_test_utils.assert_reentrant_reads_succeed(
         (splits[0].source, splits[0].start_position, splits[0].stop_position))
 
   def test_read_without_splitting_multiple_blocks(self):
@@ -322,7 +322,7 @@ class TestAvro(unittest.TestCase):
       splits = [split
                 for split in source.split(desired_bundle_size=float('inf'))]
       assert len(splits) == 1
-      source_test_utils.assertSplitAtFractionExhaustive(splits[0].source)
+      source_test_utils.assert_split_at_fraction_exhaustive(splits[0].source)
     finally:
       avro.datafile.SYNC_INTERVAL = old_sync_interval
 
@@ -343,7 +343,7 @@ class TestAvro(unittest.TestCase):
 
     source = AvroSource(corrupted_file_name)
     with self.assertRaises(ValueError) as exn:
-      source_test_utils.readFromSource(source, None, None)
+      source_test_utils.read_from_source(source, None, None)
       self.assertEqual(0, exn.exception.message.find('Unexpected sync marker'))
 
   def test_source_transform(self):

http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/io/concat_source_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/concat_source_test.py b/sdks/python/apache_beam/io/concat_source_test.py
index 2cc4684..77d2647 100644
--- a/sdks/python/apache_beam/io/concat_source_test.py
+++ b/sdks/python/apache_beam/io/concat_source_test.py
@@ -83,7 +83,7 @@ class RangeSource(iobase.BoundedSource):
 class ConcatSourceTest(unittest.TestCase):
 
   def test_range_source(self):
-    source_test_utils.assertSplitAtFractionExhaustive(RangeSource(0, 10, 3))
+    source_test_utils.assert_split_at_fraction_exhaustive(RangeSource(0, 10, 3))
 
   def test_conact_source(self):
     source = ConcatSource([RangeSource(0, 4),
@@ -157,7 +157,7 @@ class ConcatSourceTest(unittest.TestCase):
     self.assertEquals(range_tracker.position_at_fraction(1), (3, None))
 
   def test_empty_source(self):
-    read_all = source_test_utils.readFromSource
+    read_all = source_test_utils.read_from_source
 
     empty = RangeSource(0, 0)
     self.assertEquals(read_all(ConcatSource([])), [])
@@ -174,7 +174,7 @@ class ConcatSourceTest(unittest.TestCase):
                       [])
 
   def test_single_source(self):
-    read_all = source_test_utils.readFromSource
+    read_all = source_test_utils.read_from_source
 
     range10 = RangeSource(0, 10)
     self.assertEquals(read_all(ConcatSource([range10])), range(10))
@@ -183,7 +183,7 @@ class ConcatSourceTest(unittest.TestCase):
                       range(5))
 
   def test_source_with_empty_ranges(self):
-    read_all = source_test_utils.readFromSource
+    read_all = source_test_utils.read_from_source
 
     empty = RangeSource(0, 0)
     self.assertEquals(read_all(empty), [])
@@ -206,7 +206,7 @@ class ConcatSourceTest(unittest.TestCase):
                            RangeSource(13, 17),
                            empty,
                           ])
-    source_test_utils.assertSplitAtFractionExhaustive(source)
+    source_test_utils.assert_split_at_fraction_exhaustive(source)
 
   def test_run_concat_direct(self):
     source = ConcatSource([RangeSource(0, 10),
@@ -224,7 +224,7 @@ class ConcatSourceTest(unittest.TestCase):
                            RangeSource(100, 110),
                            RangeSource(1000, 1010),
                           ])
-    source_test_utils.assertSplitAtFractionExhaustive(source)
+    source_test_utils.assert_split_at_fraction_exhaustive(source)
 
 if __name__ == '__main__':
   logging.getLogger().setLevel(logging.INFO)

http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/io/source_test_utils.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/source_test_utils.py b/sdks/python/apache_beam/io/source_test_utils.py
index 542e9f6..5584fa7 100644
--- a/sdks/python/apache_beam/io/source_test_utils.py
+++ b/sdks/python/apache_beam/io/source_test_utils.py
@@ -67,7 +67,7 @@ SplitFractionStatistics = namedtuple(
     'successful_fractions non_trivial_fractions')
 
 
-def readFromSource(source, start_position=None, stop_position=None):
+def read_from_source(source, start_position=None, stop_position=None):
   """Reads elements from the given ```BoundedSource```.
 
   Only reads elements within the given position range.
@@ -97,7 +97,7 @@ def _ThreadPool(threads):
   return ThreadPool(threads)
 
 
-def assertSourcesEqualReferenceSource(reference_source_info, sources_info):
+def assert_sources_equal_reference_source(reference_source_info, sources_info):
   """Tests if a reference source is equal to a given set of sources.
 
   Given a reference source (a ``BoundedSource`` and a position range) and a
@@ -125,7 +125,7 @@ def assertSourcesEqualReferenceSource(reference_source_info, sources_info):
                      'item of the tuple gives a '
                      'iobase.BoundedSource. Received: %r'
                      , reference_source_info)
-  reference_records = readFromSource(
+  reference_records = read_from_source(
       *reference_source_info)
 
   source_records = []
@@ -147,7 +147,7 @@ def assertSourcesEqualReferenceSource(reference_source_info, sources_info):
           reference_source_info[0], source_info[0],
           type(reference_source_info[0].default_output_coder()),
           type(source_info[0].default_output_coder()))
-    source_records.extend(readFromSource(*source_info))
+    source_records.extend(read_from_source(*source_info))
 
   if len(reference_records) != len(source_records):
     raise ValueError(
@@ -161,7 +161,7 @@ def assertSourcesEqualReferenceSource(reference_source_info, sources_info):
         'same set of records.')
 
 
-def assertReentrantReadsSucceed(source_info):
+def assert_reentrant_reads_succeed(source_info):
   """Tests if a given source can be read in a reentrant manner.
 
   Assume that given source produces the set of values {v1, v2, v3, ... vn}. For
@@ -216,8 +216,8 @@ def assertReentrantReadsSucceed(source_info):
                        i, expected_values, reentrant_read)
 
 
-def assertSplitAtFractionBehavior(source, num_items_to_read_before_split,
-                                  split_fraction, expected_outcome):
+def assert_split_at_fraction_behavior(source, num_items_to_read_before_split,
+                                      split_fraction, expected_outcome):
   """Verifies the behaviour of splitting a source at a given fraction.
 
   Asserts that splitting a ``BoundedSource`` either fails after reading
@@ -237,13 +237,13 @@ def assertSplitAtFractionBehavior(source, num_items_to_read_before_split,
     source while the second value of the tuple will be '-1'.
   """
   assert isinstance(source, iobase.BoundedSource)
-  expected_items = readFromSource(source, None, None)
-  return _assertSplitAtFractionBehavior(
+  expected_items = read_from_source(source, None, None)
+  return _assert_split_at_fraction_behavior(
       source, expected_items, num_items_to_read_before_split, split_fraction,
       expected_outcome)
 
 
-def _assertSplitAtFractionBehavior(
+def _assert_split_at_fraction_behavior(
     source, expected_items, num_items_to_read_before_split,
     split_fraction, expected_outcome, start_position=None, stop_position=None):
 
@@ -307,7 +307,7 @@ def _assertSplitAtFractionBehavior(
   residual_range = (
       split_result[0], stop_position_before_split) if split_result else None
 
-  return _verifySingleSplitFractionResult(
+  return _verify_single_split_fraction_result(
       source, expected_items, current_items,
       split_result,
       (range_tracker.start_position(), range_tracker.stop_position()),
@@ -318,19 +318,19 @@ def _range_to_str(start, stop):
   return '[' + (str(start) + ',' + str(stop) + ')')
 
 
-def _verifySingleSplitFractionResult(
+def _verify_single_split_fraction_result(
     source, expected_items, current_items, split_successful, primary_range,
     residual_range, split_fraction):
 
   assert primary_range
-  primary_items = readFromSource(source, *primary_range)
+  primary_items = read_from_source(source, *primary_range)
 
   if not split_successful:
     # For unsuccessful splits, residual_range should be None.
     assert not residual_range
 
   residual_items = (
-      readFromSource(source, *residual_range)
+      read_from_source(source, *residual_range)
       if split_successful else [])
 
   total_items = primary_items + residual_items
@@ -359,9 +359,8 @@ def _verifySingleSplitFractionResult(
   return result
 
 
-def assertSplitAtFractionSucceedsAndConsistent(source,
-                                               num_items_to_read_before_split,
-                                               split_fraction):
+def assert_split_at_fraction_succeeds_and_consistent(
+    source, num_items_to_read_before_split, split_fraction):
   """Verifies some consistency properties of dynamic work rebalancing.
 
   Equivalent to the following pseudocode:::
@@ -392,13 +391,13 @@ def assertSplitAtFractionSucceedsAndConsistent(source,
     split_fraction: fraction to split at.
   """
 
-  assertSplitAtFractionBehavior(
+  assert_split_at_fraction_behavior(
       source, num_items_to_read_before_split, split_fraction,
       ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
 
 
-def assertSplitAtFractionFails(source, num_items_to_read_before_split,
-                               split_fraction):
+def assert_split_at_fraction_fails(source, num_items_to_read_before_split,
+                                   split_fraction):
   """Asserts that dynamic work rebalancing at a given fraction fails.
 
   Asserts that trying to perform dynamic splitting after reading
@@ -410,16 +409,15 @@ def assertSplitAtFractionFails(source, num_items_to_read_before_split,
     split_fraction: fraction to split at.
   """
 
-  assertSplitAtFractionBehavior(
+  assert_split_at_fraction_behavior(
       source, num_items_to_read_before_split, split_fraction,
       ExpectedSplitOutcome.MUST_FAIL)
 
 
-def assertSplitAtFractionBinary(source, expected_items,
-                                num_items_to_read_before_split, left_fraction,
-                                left_result,
-                                right_fraction, right_result, stats,
-                                start_position=None, stop_position=None):
+def assert_split_at_fraction_binary(
+    source, expected_items, num_items_to_read_before_split, left_fraction,
+    left_result, right_fraction, right_result, stats, start_position=None,
+    stop_position=None):
   """Performs dynamic work rebalancing for fractions within a given range.
 
   Asserts that given a start position, a source can be split at every
@@ -445,16 +443,16 @@ def assertSplitAtFractionBinary(source, expected_items,
   middle_fraction = (left_fraction + right_fraction) / 2
 
   if left_result is None:
-    left_result = _assertSplitAtFractionBehavior(
+    left_result = _assert_split_at_fraction_behavior(
         source, expected_items, num_items_to_read_before_split, left_fraction,
         ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS)
 
   if right_result is None:
-    right_result = _assertSplitAtFractionBehavior(
+    right_result = _assert_split_at_fraction_behavior(
         source, expected_items, num_items_to_read_before_split,
         right_fraction, ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS)
 
-  middle_result = _assertSplitAtFractionBehavior(
+  middle_result = _assert_split_at_fraction_behavior(
       source, expected_items, num_items_to_read_before_split, middle_fraction,
       ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS)
 
@@ -468,7 +466,7 @@ def assertSplitAtFractionBinary(source, expected_items,
   # enough since the total number of records is constant).
 
   if left_result[0] != middle_result[0]:
-    assertSplitAtFractionBinary(
+    assert_split_at_fraction_binary(
         source, expected_items, num_items_to_read_before_split, left_fraction,
         left_result, middle_fraction, middle_result, stats)
 
@@ -477,7 +475,7 @@ def assertSplitAtFractionBinary(source, expected_items,
   # fraction 1.0, there might be fractions in range ('middle_fraction', 1.0)
   # where dynamic splitting succeeds).
   if right_fraction == 1.0 or middle_result[0] != right_result[0]:
-    assertSplitAtFractionBinary(
+    assert_split_at_fraction_binary(
         source, expected_items, num_items_to_read_before_split,
         middle_fraction, middle_result, right_fraction, right_result, stats)
 
@@ -485,7 +483,7 @@ MAX_CONCURRENT_SPLITTING_TRIALS_PER_ITEM = 100
 MAX_CONCURRENT_SPLITTING_TRIALS_TOTAL = 1000
 
 
-def assertSplitAtFractionExhaustive(
+def assert_split_at_fraction_exhaustive(
     source, start_position=None, stop_position=None,
     perform_multi_threaded_test=True):
   """Performs and tests dynamic work rebalancing exhaustively.
@@ -504,7 +502,7 @@ def assertSplitAtFractionExhaustive(
     ValueError: if the exhaustive splitting test fails.
   """
 
-  expected_items = readFromSource(source, start_position, stop_position)
+  expected_items = read_from_source(source, start_position, stop_position)
   if not expected_items:
     raise ValueError('Source %r is empty.', source)
 
@@ -519,7 +517,7 @@ def assertSplitAtFractionExhaustive(
   for i in range(len(expected_items)):
     stats = SplitFractionStatistics([], [])
 
-    assertSplitAtFractionBinary(
+    assert_split_at_fraction_binary(
         source, expected_items, i, 0.0, None, 1.0, None, stats)
 
     if stats.successful_fractions:
@@ -571,7 +569,7 @@ def assertSplitAtFractionExhaustive(
           )
           break
 
-        if _assertSplitAtFractionConcurrent(
+        if _assert_split_at_fraction_concurrent(
             source, expected_items, i, min_non_trivial_fraction, thread_pool):
           have_success = True
         else:
@@ -595,7 +593,7 @@ def assertSplitAtFractionExhaustive(
                num_total_trials, len(expected_items))
 
 
-def _assertSplitAtFractionConcurrent(
+def _assert_split_at_fraction_concurrent(
     source, expected_items, num_items_to_read_before_splitting,
     split_fraction, thread_pool=None):
 
@@ -634,7 +632,7 @@ def _assertSplitAtFractionConcurrent(
   residual_range = (
       split_result[0], stop_position_before_split) if split_result else None
 
-  res = _verifySingleSplitFractionResult(
+  res = _verify_single_split_fraction_result(
       source, expected_items, current_items, split_result,
       primary_range, residual_range, split_fraction)
 

http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/io/source_test_utils_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/source_test_utils_test.py b/sdks/python/apache_beam/io/source_test_utils_test.py
index f6f9ec3..b822fc5 100644
--- a/sdks/python/apache_beam/io/source_test_utils_test.py
+++ b/sdks/python/apache_beam/io/source_test_utils_test.py
@@ -48,7 +48,7 @@ class SourceTestUtilsTest(unittest.TestCase):
     data = self._create_data(100)
     source = self._create_source(data)
     self.assertItemsEqual(
-        data, source_test_utils.readFromSource(source, None, None))
+        data, source_test_utils.read_from_source(source, None, None))
 
   def test_source_equals_reference_source(self):
     data = self._create_data(100)
@@ -60,25 +60,25 @@ class SourceTestUtilsTest(unittest.TestCase):
                        'bundles. Please adjust the test so that at least '
                        'two splits get generated.', len(sources_info))
 
-    source_test_utils.assertSourcesEqualReferenceSource(
+    source_test_utils.assert_sources_equal_reference_source(
         (reference_source, None, None), sources_info)
 
   def test_split_at_fraction_successful(self):
     data = self._create_data(100)
     source = self._create_source(data)
-    result1 = source_test_utils.assertSplitAtFractionBehavior(
+    result1 = source_test_utils.assert_split_at_fraction_behavior(
         source, 10, 0.5,
         source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
-    result2 = source_test_utils.assertSplitAtFractionBehavior(
+    result2 = source_test_utils.assert_split_at_fraction_behavior(
         source, 20, 0.5,
         source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
     self.assertEquals(result1, result2)
     self.assertEquals(100, result1[0] + result1[1])
 
-    result3 = source_test_utils.assertSplitAtFractionBehavior(
+    result3 = source_test_utils.assert_split_at_fraction_behavior(
         source, 30, 0.8,
         source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
-    result4 = source_test_utils.assertSplitAtFractionBehavior(
+    result4 = source_test_utils.assert_split_at_fraction_behavior(
         source, 50, 0.8,
         source_test_utils.ExpectedSplitOutcome.MUST_SUCCEED_AND_BE_CONSISTENT)
     self.assertEquals(result3, result4)
@@ -91,13 +91,13 @@ class SourceTestUtilsTest(unittest.TestCase):
     data = self._create_data(100)
     source = self._create_source(data)
 
-    result = source_test_utils.assertSplitAtFractionBehavior(
+    result = source_test_utils.assert_split_at_fraction_behavior(
         source, 90, 0.1, source_test_utils.ExpectedSplitOutcome.MUST_FAIL)
     self.assertEquals(result[0], 100)
     self.assertEquals(result[1], -1)
 
     with self.assertRaises(ValueError):
-      source_test_utils.assertSplitAtFractionBehavior(
+      source_test_utils.assert_split_at_fraction_behavior(
           source, 10, 0.5, source_test_utils.ExpectedSplitOutcome.MUST_FAIL)
 
   def test_split_at_fraction_binary(self):
@@ -105,7 +105,7 @@ class SourceTestUtilsTest(unittest.TestCase):
     source = self._create_source(data)
 
     stats = source_test_utils.SplitFractionStatistics([], [])
-    source_test_utils.assertSplitAtFractionBinary(
+    source_test_utils.assert_split_at_fraction_binary(
         source, data, 10, 0.5, None, 0.8, None, stats)
 
     # These lists should not be empty now.
@@ -115,7 +115,7 @@ class SourceTestUtilsTest(unittest.TestCase):
   def test_split_at_fraction_exhaustive(self):
     data = self._create_data(10)
     source = self._create_source(data)
-    source_test_utils.assertSplitAtFractionExhaustive(source)
+    source_test_utils.assert_split_at_fraction_exhaustive(source)
 
 if __name__ == '__main__':
   logging.getLogger().setLevel(logging.INFO)

http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/io/textio_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/io/textio_test.py b/sdks/python/apache_beam/io/textio_test.py
index b3f4391..90dc665 100644
--- a/sdks/python/apache_beam/io/textio_test.py
+++ b/sdks/python/apache_beam/io/textio_test.py
@@ -255,7 +255,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
     sources_info = ([
         (split.source, split.start_position, split.stop_position) for
         split in splits])
-    source_test_utils.assertSourcesEqualReferenceSource(
+    source_test_utils.assert_sources_equal_reference_source(
         reference_source_info, sources_info)
 
   def test_progress(self):
@@ -291,7 +291,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
     assert len(expected_data) == 10
     source = TextSource(file_name, 0, CompressionTypes.UNCOMPRESSED, True,
                         coders.StrUtf8Coder())
-    source_test_utils.assertReentrantReadsSucceed((source, None, None))
+    source_test_utils.assert_reentrant_reads_succeed((source, None, None))
 
   def test_read_reentrant_after_splitting(self):
     file_name, expected_data = write_data(10)
@@ -300,7 +300,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
                         coders.StrUtf8Coder())
     splits = [split for split in source.split(desired_bundle_size=100000)]
     assert len(splits) == 1
-    source_test_utils.assertReentrantReadsSucceed(
+    source_test_utils.assert_reentrant_reads_succeed(
         (splits[0].source, splits[0].start_position, splits[0].stop_position))
 
   def test_dynamic_work_rebalancing(self):
@@ -310,7 +310,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
                         coders.StrUtf8Coder())
     splits = [split for split in source.split(desired_bundle_size=100000)]
     assert len(splits) == 1
-    source_test_utils.assertSplitAtFractionExhaustive(
+    source_test_utils.assert_split_at_fraction_exhaustive(
         splits[0].source, splits[0].start_position, splits[0].stop_position)
 
   def test_dynamic_work_rebalancing_windows_eol(self):
@@ -320,7 +320,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
                         coders.StrUtf8Coder())
     splits = [split for split in source.split(desired_bundle_size=100000)]
     assert len(splits) == 1
-    source_test_utils.assertSplitAtFractionExhaustive(
+    source_test_utils.assert_split_at_fraction_exhaustive(
         splits[0].source, splits[0].start_position, splits[0].stop_position,
         perform_multi_threaded_test=False)
 
@@ -331,7 +331,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
                         coders.StrUtf8Coder())
     splits = [split for split in source.split(desired_bundle_size=100000)]
     assert len(splits) == 1
-    source_test_utils.assertSplitAtFractionExhaustive(
+    source_test_utils.assert_split_at_fraction_exhaustive(
         splits[0].source, splits[0].start_position, splits[0].stop_position,
         perform_multi_threaded_test=False)
 
@@ -449,7 +449,7 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
     sources_info = ([
         (split.source, split.start_position, split.stop_position) for
         split in splits])
-    source_test_utils.assertSourcesEqualReferenceSource(
+    source_test_utils.assert_sources_equal_reference_source(
         reference_source_info, sources_info)
 
   def test_read_gzip_empty_file(self):
@@ -561,10 +561,10 @@ class TextSourceTest(_TestCaseWithTempDirCleanUp):
         (split.source, split.start_position, split.stop_position) for
         split in splits])
     self.assertGreater(len(sources_info), 1)
-    reference_lines = source_test_utils.readFromSource(*reference_source_info)
+    reference_lines = source_test_utils.read_from_source(*reference_source_info)
     split_lines = []
     for source_info in sources_info:
-      split_lines.extend(source_test_utils.readFromSource(*source_info))
+      split_lines.extend(source_test_utils.read_from_source(*source_info))
 
     self.assertEqual(expected_data[2:], reference_lines)
     self.assertEqual(reference_lines, split_lines)

http://git-wip-us.apache.org/repos/asf/beam/blob/1f66fbdc/sdks/python/apache_beam/transforms/create_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/transforms/create_test.py b/sdks/python/apache_beam/transforms/create_test.py
index f4b1f07..2352acd 100644
--- a/sdks/python/apache_beam/transforms/create_test.py
+++ b/sdks/python/apache_beam/transforms/create_test.py
@@ -41,7 +41,7 @@ class CreateTest(unittest.TestCase):
 
   def check_read(self, values, coder):
     source = Create._create_source_from_iterable(values, coder)
-    read_values = source_test_utils.readFromSource(source)
+    read_values = source_test_utils.read_from_source(source)
     self.assertEqual(sorted(values), sorted(read_values))
 
   def test_create_source_read_with_initial_splits(self):
@@ -73,27 +73,27 @@ class CreateTest(unittest.TestCase):
     splits_info = [
         (split.source, split.start_position, split.stop_position)
         for split in splits]
-    source_test_utils.assertSourcesEqualReferenceSource((source, None, None),
-                                                        splits_info)
+    source_test_utils.assert_sources_equal_reference_source(
+        (source, None, None), splits_info)
 
   def test_create_source_read_reentrant(self):
     source = Create._create_source_from_iterable(range(9), self.coder)
-    source_test_utils.assertReentrantReadsSucceed((source, None, None))
+    source_test_utils.assert_reentrant_reads_succeed((source, None, None))
 
   def test_create_source_read_reentrant_with_initial_splits(self):
     source = Create._create_source_from_iterable(range(24), self.coder)
     for split in source.split(desired_bundle_size=5):
-      source_test_utils.assertReentrantReadsSucceed((split.source,
-                                                     split.start_position,
-                                                     split.stop_position))
+      source_test_utils.assert_reentrant_reads_succeed((split.source,
+                                                        split.start_position,
+                                                        split.stop_position))
 
   def test_create_source_dynamic_splitting(self):
     # 2 values
     source = Create._create_source_from_iterable(range(2), self.coder)
-    source_test_utils.assertSplitAtFractionExhaustive(source)
+    source_test_utils.assert_split_at_fraction_exhaustive(source)
     # Multiple values.
     source = Create._create_source_from_iterable(range(11), self.coder)
-    source_test_utils.assertSplitAtFractionExhaustive(
+    source_test_utils.assert_split_at_fraction_exhaustive(
         source, perform_multi_threaded_test=True)
 
   def test_create_source_progress(self):


[41/50] [abbrv] beam git commit: [BEAM-59] Register standard FileSystems wherever we register IOChannelFactories

Posted by ke...@apache.org.
[BEAM-59] Register standard FileSystems wherever we register IOChannelFactories

Additionally, drop an unnecessary use of `GcsOptions` in
`PipelineRunner`.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/b43c92f2
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/b43c92f2
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/b43c92f2

Branch: refs/heads/jstorm-runner
Commit: b43c92f208304cfc10d79b140682dfbe6580d7c4
Parents: c52ce7c
Author: Dan Halperin <dh...@google.com>
Authored: Mon Apr 17 20:39:48 2017 -0700
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Tue Apr 18 10:33:10 2017 +0200

----------------------------------------------------------------------
 .../flink/translation/utils/SerializedPipelineOptions.java    | 2 ++
 .../beam/runners/spark/translation/SparkRuntimeContext.java   | 2 ++
 .../main/java/org/apache/beam/sdk/runners/PipelineRunner.java | 7 +++----
 .../main/java/org/apache/beam/sdk/testing/TestPipeline.java   | 2 ++
 4 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/b43c92f2/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java
----------------------------------------------------------------------
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java
index 390e6da..2256bb1 100644
--- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java
@@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
+import org.apache.beam.sdk.io.FileSystems;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.util.IOChannelUtils;
 
@@ -55,6 +56,7 @@ public class SerializedPipelineOptions implements Serializable {
         pipelineOptions = new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
 
         IOChannelUtils.registerIOFactoriesAllowOverride(pipelineOptions);
+        FileSystems.setDefaultConfigInWorkers(pipelineOptions);
       } catch (IOException e) {
         throw new RuntimeException("Couldn't deserialize the PipelineOptions.", e);
       }

http://git-wip-us.apache.org/repos/asf/beam/blob/b43c92f2/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkRuntimeContext.java
----------------------------------------------------------------------
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkRuntimeContext.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkRuntimeContext.java
index 4ccfead..9d0f576 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkRuntimeContext.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkRuntimeContext.java
@@ -29,6 +29,7 @@ import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.CannotProvideCoderException;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderRegistry;
+import org.apache.beam.sdk.io.FileSystems;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.Aggregator;
 import org.apache.beam.sdk.transforms.Combine;
@@ -130,6 +131,7 @@ public class SparkRuntimeContext implements Serializable {
         }
         // register IO factories.
         IOChannelUtils.registerIOFactoriesAllowOverride(pipelineOptions);
+        FileSystems.setDefaultConfigInWorkers(pipelineOptions);
       }
       return pipelineOptions;
     }

http://git-wip-us.apache.org/repos/asf/beam/blob/b43c92f2/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunner.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunner.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunner.java
index 80bb90f..7b2fba3 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunner.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunner.java
@@ -21,9 +21,8 @@ import static com.google.common.base.Preconditions.checkNotNull;
 
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineResult;
-import org.apache.beam.sdk.options.GcsOptions;
+import org.apache.beam.sdk.io.FileSystems;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.options.PipelineOptionsValidator;
 import org.apache.beam.sdk.util.IOChannelUtils;
 import org.apache.beam.sdk.util.InstanceBuilder;
 
@@ -41,11 +40,11 @@ public abstract class PipelineRunner<ResultT extends PipelineResult> {
    * @return The newly created runner.
    */
   public static PipelineRunner<? extends PipelineResult> fromOptions(PipelineOptions options) {
-    GcsOptions gcsOptions = PipelineOptionsValidator.validate(GcsOptions.class, options);
     checkNotNull(options);
 
     // (Re-)register standard IO factories. Clobbers any prior credentials.
-    IOChannelUtils.registerIOFactoriesAllowOverride(gcsOptions);
+    IOChannelUtils.registerIOFactoriesAllowOverride(options);
+    FileSystems.setDefaultConfigInWorkers(options);
 
     @SuppressWarnings("unchecked")
     PipelineRunner<? extends PipelineResult> result =

http://git-wip-us.apache.org/repos/asf/beam/blob/b43c92f2/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
----------------------------------------------------------------------
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
index a4ab196..3d3de51 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java
@@ -40,6 +40,7 @@ import java.util.Map.Entry;
 import javax.annotation.Nullable;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineResult;
+import org.apache.beam.sdk.io.FileSystems;
 import org.apache.beam.sdk.options.ApplicationNameOptions;
 import org.apache.beam.sdk.options.GcpOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
@@ -404,6 +405,7 @@ public class TestPipeline extends Pipeline implements TestRule {
       options.setStableUniqueNames(CheckEnabled.ERROR);
 
       IOChannelUtils.registerIOFactoriesAllowOverride(options);
+      FileSystems.setDefaultConfigInWorkers(options);
       return options;
     } catch (IOException e) {
       throw new RuntimeException(
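
The registration pattern above recurs in each runner that deserializes
PipelineOptions on workers. A minimal sketch of that pattern, assuming a
hypothetical WorkerOptionsInitializer wrapper class (the ObjectMapper,
IOChannelUtils, and FileSystems calls are exactly the ones in the diff):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.IOException;
    import org.apache.beam.sdk.io.FileSystems;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.util.IOChannelUtils;

    // Hypothetical helper; not part of the SDK. It mirrors what
    // SerializedPipelineOptions and SparkRuntimeContext now do when a
    // worker first rehydrates its options.
    class WorkerOptionsInitializer {
      static PipelineOptions deserializeAndRegister(String serializedOptions)
          throws IOException {
        PipelineOptions options =
            new ObjectMapper().readValue(serializedOptions, PipelineOptions.class);
        // (Re-)register standard IO factories; clobbers any prior
        // credentials, as in PipelineRunner.fromOptions().
        IOChannelUtils.registerIOFactoriesAllowOverride(options);
        // The step this commit adds: configure the FileSystems registry
        // from the same options.
        FileSystems.setDefaultConfigInWorkers(options);
        return options;
      }
    }

Configuring both registries from the same options in one place keeps the
legacy IOChannelFactory paths and the newer FileSystems registry in sync
on every worker.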


[22/50] [abbrv] beam git commit: This closes #2551

Posted by ke...@apache.org.
This closes #2551


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/14e5cd28
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/14e5cd28
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/14e5cd28

Branch: refs/heads/jstorm-runner
Commit: 14e5cd28ca9427a9b4185759f96ff1d8265c374c
Parents: 588a4d0 faece41
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 09:20:46 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 09:20:46 2017 -0700

----------------------------------------------------------------------
 runners/google-cloud-dataflow-java/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[25/50] [abbrv] beam git commit: This closes #2554

Posted by ke...@apache.org.
This closes #2554


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/075b621f
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/075b621f
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/075b621f

Branch: refs/heads/jstorm-runner
Commit: 075b621fc4c3a483f82f968ded8a120ca1926fe5
Parents: 8302783 4ae4e7d
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 13:06:59 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 13:06:59 2017 -0700

----------------------------------------------------------------------
 .../org/apache/beam/sdk/options/PipelineOptions.java    | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------



[35/50] [abbrv] beam git commit: Set the Project of a Table Reference at Runtime

Posted by ke...@apache.org.
Set the Project of a Table Reference at Runtime

Instead of resolving the table reference's default project at job
submission time, resolve it at job execution time, when the runtime
BigQueryOptions are available.


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/b9e65779
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/b9e65779
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/b9e65779

Branch: refs/heads/jstorm-runner
Commit: b9e657790c69ae4f9eead893655c595e34ded4da
Parents: a25c7d3
Author: Thomas Groh <tg...@google.com>
Authored: Mon Apr 17 15:41:57 2017 -0700
Committer: Thomas Groh <tg...@google.com>
Committed: Mon Apr 17 18:20:07 2017 -0700

----------------------------------------------------------------------
 .../beam/sdk/io/gcp/bigquery/BigQueryIO.java    | 59 +++++++-------------
 .../io/gcp/bigquery/BigQueryTableSource.java    | 30 +++++++++-
 2 files changed, 50 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/beam/blob/b9e65779/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index f5f93b3..9753da5 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -410,7 +410,7 @@ public class BigQueryIO {
         }
       }
 
-      ValueProvider<TableReference> table = getTableWithDefaultProject(bqOptions);
+      ValueProvider<TableReference> table = getTableProvider();
 
       checkState(
           table == null || getQuery() == null,
@@ -428,6 +428,12 @@ public class BigQueryIO {
             getUseLegacySql() == null,
             "Invalid BigQueryIO.Read: Specifies a table with a SQL dialect"
                 + " preference, which only applies to queries");
+        if (table.isAccessible() && Strings.isNullOrEmpty(table.get().getProjectId())) {
+          LOG.info(
+              "Project of {} not set. The value of {}.getProject() at execution time will be used.",
+              TableReference.class.getSimpleName(),
+              BigQueryOptions.class.getSimpleName());
+        }
       } else /* query != null */ {
         checkState(
             getFlattenResults() != null, "flattenResults should not be null if query is set");
@@ -495,10 +501,13 @@ public class BigQueryIO {
                 extractDestinationDir,
                 getBigQueryServices());
       } else {
-        ValueProvider<TableReference> inputTable = getTableWithDefaultProject(bqOptions);
-        source = BigQueryTableSource.create(
-            jobIdToken, inputTable, extractDestinationDir, getBigQueryServices(),
-            StaticValueProvider.of(executingProject));
+        source =
+            BigQueryTableSource.create(
+                jobIdToken,
+                getTableProvider(),
+                extractDestinationDir,
+                getBigQueryServices(),
+                StaticValueProvider.of(executingProject));
       }
       PassThroughThenCleanup.CleanupOperation cleanupOperation =
           new PassThroughThenCleanup.CleanupOperation() {
@@ -506,12 +515,12 @@ public class BigQueryIO {
             void cleanup(PipelineOptions options) throws Exception {
               BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
 
-              JobReference jobRef = new JobReference()
-                  .setProjectId(executingProject)
-                  .setJobId(getExtractJobId(jobIdToken));
+              JobReference jobRef =
+                  new JobReference()
+                      .setProjectId(executingProject)
+                      .setJobId(getExtractJobId(jobIdToken));
 
-              Job extractJob = getBigQueryServices().getJobService(bqOptions)
-                  .getJob(jobRef);
+              Job extractJob = getBigQueryServices().getJobService(bqOptions).getJob(jobRef);
 
               Collection<String> extractFiles = null;
               if (extractJob != null) {
@@ -526,7 +535,8 @@ public class BigQueryIO {
               if (extractFiles != null && !extractFiles.isEmpty()) {
                 new GcsUtilFactory().create(options).remove(extractFiles);
               }
-            }};
+            }
+          };
       return input.getPipeline()
           .apply(org.apache.beam.sdk.io.Read.from(source))
           .setCoder(getDefaultOutputCoder())
@@ -557,33 +567,6 @@ public class BigQueryIO {
 
     /**
      * Returns the table to read, or {@code null} if reading from a query instead.
-     *
-     * <p>If the table's project is not specified, use the executing project.
-     */
-    @Nullable ValueProvider<TableReference> getTableWithDefaultProject(
-        BigQueryOptions bqOptions) {
-      ValueProvider<TableReference> table = getTableProvider();
-      if (table == null) {
-        return table;
-      }
-      if (!table.isAccessible()) {
-        LOG.info("Using a dynamic value for table input. This must contain a project"
-            + " in the table reference: {}", table);
-        return table;
-      }
-      if (Strings.isNullOrEmpty(table.get().getProjectId())) {
-        // If user does not specify a project we assume the table to be located in
-        // the default project.
-        TableReference tableRef = table.get();
-        tableRef.setProjectId(bqOptions.getProject());
-        return NestedValueProvider.of(StaticValueProvider.of(
-            BigQueryHelpers.toJsonString(tableRef)), new JsonTableRefToTableRef());
-      }
-      return table;
-    }
-
-    /**
-     * Returns the table to read, or {@code null} if reading from a query instead.
      */
     @Nullable
     public ValueProvider<TableReference> getTableProvider() {

http://git-wip-us.apache.org/repos/asf/beam/blob/b9e65779/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
index cbd5781..22aba64 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
@@ -24,6 +24,7 @@ import static com.google.common.base.Preconditions.checkState;
 import com.google.api.services.bigquery.model.TableReference;
 import com.google.api.services.bigquery.model.TableRow;
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicReference;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.TableRefToJson;
@@ -32,12 +33,15 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.ValueProvider;
 import org.apache.beam.sdk.options.ValueProvider.NestedValueProvider;
 import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link BigQuerySourceBase} for reading BigQuery tables.
  */
 @VisibleForTesting
 class BigQueryTableSource extends BigQuerySourceBase {
+  private static final Logger LOG = LoggerFactory.getLogger(BigQueryTableSource.class);
 
   static BigQueryTableSource create(
       ValueProvider<String> jobIdToken,
@@ -66,7 +70,31 @@ class BigQueryTableSource extends BigQuerySourceBase {
   @Override
   protected TableReference getTableToExtract(BigQueryOptions bqOptions) throws IOException {
     checkState(jsonTable.isAccessible());
-    return BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class);
+    TableReference tableReference =
+        BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class);
+    return setDefaultProjectIfAbsent(bqOptions, tableReference);
+  }
+
+  /**
+   * Sets the {@link TableReference#projectId} of the provided table reference to the id of the
+   * default project if the table reference does not have a project ID specified.
+   */
+  private TableReference setDefaultProjectIfAbsent(
+      BigQueryOptions bqOptions, TableReference tableReference) {
+    if (Strings.isNullOrEmpty(tableReference.getProjectId())) {
+      checkState(
+          !Strings.isNullOrEmpty(bqOptions.getProject()),
+          "No project ID set in %s or %s, cannot construct a complete %s",
+          TableReference.class.getSimpleName(),
+          BigQueryOptions.class.getSimpleName(),
+          TableReference.class.getSimpleName());
+      LOG.info(
+          "Project ID not set in {}. Using default project from {}.",
+          TableReference.class.getSimpleName(),
+          BigQueryOptions.class.getSimpleName());
+      tableReference.setProjectId(bqOptions.getProject());
+    }
+    return tableReference;
   }
 
   @Override
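
The defaulting itself is small. A minimal sketch of the execution-time
behaviour, assuming a hypothetical TableReferenceDefaults helper (the
TableReference and BigQueryOptions accessors are the ones used in the
diff):

    import com.google.api.services.bigquery.model.TableReference;
    import com.google.common.base.Strings;
    import org.apache.beam.sdk.io.gcp.bigquery.BigQueryOptions;

    // Hypothetical helper; not part of the SDK. It mirrors
    // BigQueryTableSource.setDefaultProjectIfAbsent().
    class TableReferenceDefaults {
      static TableReference withDefaultProject(
          TableReference table, BigQueryOptions options) {
        if (Strings.isNullOrEmpty(table.getProjectId())) {
          // Fill in the project of the job that is executing, not the
          // one that submitted the pipeline.
          table.setProjectId(options.getProject());
        }
        return table;
      }
    }

Deferring the lookup until the source is read means a table reference can
be built without a project at submission time and still resolve against
the project that actually runs the job.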


[30/50] [abbrv] beam git commit: This closes #2553

Posted by ke...@apache.org.
This closes #2553


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/85cfd0c7
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/85cfd0c7
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/85cfd0c7

Branch: refs/heads/jstorm-runner
Commit: 85cfd0c7c2e2e0730eed4f208ca637981d03fb6e
Parents: 32a576a efa82fa
Author: Ahmet Altay <al...@google.com>
Authored: Mon Apr 17 14:34:19 2017 -0700
Committer: Ahmet Altay <al...@google.com>
Committed: Mon Apr 17 14:34:19 2017 -0700

----------------------------------------------------------------------
 sdks/python/.pylintrc | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------