Posted to github@beam.apache.org by GitBox <gi...@apache.org> on 2020/08/21 00:49:53 UTC

[GitHub] [beam] pabloem commented on a change in pull request #12415: [BEAM-10603] Add the RecordingManager and associated classes.

pabloem commented on a change in pull request #12415:
URL: https://github.com/apache/beam/pull/12415#discussion_r474338799



##########
File path: sdks/python/apache_beam/runners/interactive/recording_manager.py
##########
@@ -0,0 +1,334 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+
+import logging
+import threading
+import time
+import warnings
+
+import apache_beam as beam
+from apache_beam.runners.interactive import background_caching_job as bcj
+from apache_beam.runners.interactive import interactive_environment as ie
+from apache_beam.runners.interactive import interactive_runner as ir
+from apache_beam.runners.interactive import pipeline_fragment as pf
+from apache_beam.runners.interactive import pipeline_instrument as pi
+from apache_beam.runners.interactive import utils
+from apache_beam.runners.interactive.options.capture_limiters import CountLimiter
+from apache_beam.runners.interactive.options.capture_limiters import ProcessingTimeLimiter
+
+_LOGGER = logging.getLogger(__name__)
+
+PipelineState = beam.runners.runner.PipelineState

Review comment:
       Why aren't you importing this class like a normal one?
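       For example (just a sketch; assuming there is no circular-import reason
       that forces the attribute-access style here), a plain import would be:

           from apache_beam.runners.runner import PipelineState

       and the module-level alias would no longer be needed.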

##########
File path: sdks/python/apache_beam/runners/interactive/recording_manager.py
##########
@@ -0,0 +1,334 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+
+import logging
+import threading
+import time
+import warnings
+
+import apache_beam as beam
+from apache_beam.runners.interactive import background_caching_job as bcj
+from apache_beam.runners.interactive import interactive_environment as ie
+from apache_beam.runners.interactive import interactive_runner as ir
+from apache_beam.runners.interactive import pipeline_fragment as pf
+from apache_beam.runners.interactive import pipeline_instrument as pi
+from apache_beam.runners.interactive import utils
+from apache_beam.runners.interactive.options.capture_limiters import CountLimiter
+from apache_beam.runners.interactive.options.capture_limiters import ProcessingTimeLimiter
+
+_LOGGER = logging.getLogger(__name__)
+
+PipelineState = beam.runners.runner.PipelineState
+
+
+class ElementStream:
+  """A stream of elements from a given PCollection."""
+  def __init__(
+      self,
+      pcoll,  # type: beam.pvalue.PCollection
+      var,  # type: str
+      cache_key,  # type: str
+      max_n,  # type: int
+      max_duration_secs  # type: float
+      ):
+    self._pcoll = pcoll
+    self._cache_key = cache_key
+    self._pipeline = pcoll.pipeline
+    self._var = var
+    self._n = max_n
+    self._duration_secs = max_duration_secs
+
+    # A small state variable that, when True, indicates that no more new
+    # elements will be yielded if read() is called again.
+    self._done = False
+
+  def var(self):
+    # type: () -> str
+
+    """Returns the variable named that defined this PCollection."""
+    return self._var
+
+  def display_id(self, suffix):
+    # type: (str) -> str
+
+    """Returns a unique id able to be displayed in a web browser."""
+    return utils.obfuscate(self._cache_key, suffix)
+
+  def is_computed(self):
+    # type: () -> bool
+
+    """Returns True if no more elements will be recorded."""
+    return self._pcoll in ie.current_env().computed_pcollections
+
+  def is_done(self):
+    # type: () -> bool
+
+    """Returns True if no more new elements will be yielded."""
+    return self._done
+
+  def read(self, tail=True):
+    # type: (bool) -> Any
+
+    """Reads the elements currently recorded."""
+
+    # Get the cache manager and wait until the file exists.
+    cache_manager = ie.current_env().get_cache_manager(self._pipeline)
+    while not cache_manager.exists('full', self._cache_key):
+      time.sleep(0.5)

Review comment:
       I remember sleeping gave us trouble earlier. Does it make sense to write a method in cache_manager to wait without sleeping? (maybe on a lock or some such thing?)
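       A rough sketch of what I had in mind (wait_for, _event_for and
       _write_events are hypothetical names, not part of the current
       CacheManager API, and this only helps if the writer runs in the same
       process):

           import threading

           class CacheManager(object):
             def __init__(self):
               # One Event per label tuple, set on the first write for that key.
               self._write_events = {}
               self._events_lock = threading.Lock()

             def _event_for(self, *labels):
               # Lazily create the Event associated with e.g. ('full', cache_key).
               with self._events_lock:
                 return self._write_events.setdefault(labels, threading.Event())

             def write(self, values, *labels):
               # ... existing write logic ...
               self._event_for(*labels).set()

             def wait_for(self, *labels, **kwargs):
               """Blocks until something has been written for the given labels."""
               return self._event_for(*labels).wait(kwargs.get('timeout'))

       The polling loop above would then become a single blocking call, e.g.
       cache_manager.wait_for('full', self._cache_key, timeout=30). If the cache
       is written from a different process, a file-system watch or a condition
       exposed through the interactive environment would be needed instead.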
   

##########
File path: sdks/python/apache_beam/runners/interactive/interactive_runner.py
##########
@@ -152,7 +152,11 @@ def run_pipeline(self, pipeline, options):
               user_pipeline)):
         streaming_cache_manager = ie.current_env().get_cache_manager(
             user_pipeline)
-        if streaming_cache_manager:
+
+        # Only create the server if it doesn't exist already.
+        if (streaming_cache_manager and
+            not ie.current_env().get_test_stream_service_controller(

Review comment:
       IIUC this change is fixing an unrelated issue, right?

##########
File path: sdks/python/apache_beam/runners/interactive/recording_manager_test.py
##########
@@ -0,0 +1,301 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+
+import sys
+import unittest
+
+import apache_beam as beam
+from apache_beam import coders
+from apache_beam.portability.api.beam_interactive_api_pb2 import TestStreamFileRecord
+from apache_beam.runners.interactive import background_caching_job as bcj
+from apache_beam.runners.interactive import interactive_beam as ib
+from apache_beam.runners.interactive import interactive_environment as ie
+from apache_beam.runners.interactive import pipeline_instrument as pi
+from apache_beam.runners.interactive.interactive_runner import InteractiveRunner
+from apache_beam.runners.interactive.recording_manager import ElementStream
+from apache_beam.runners.interactive.recording_manager import Recording
+from apache_beam.runners.interactive.recording_manager import RecordingManager
+from apache_beam.runners.interactive.testing.test_cache_manager import FileRecordsBuilder
+from apache_beam.runners.interactive.testing.test_cache_manager import InMemoryCache
+from apache_beam.transforms.window import GlobalWindow
+from apache_beam.utils.timestamp import MIN_TIMESTAMP
+from apache_beam.utils.windowed_value import WindowedValue
+
+PipelineState = beam.runners.runner.PipelineState
+
+
+class MockPipelineResult(beam.runners.runner.PipelineResult):
+  """Mock class for controlling a PipelineResult."""
+  def __init__(self):
+    self._state = PipelineState.RUNNING
+
+  def wait_until_finish(self):
+    pass
+
+  def set_state(self, state):
+    self._state = state
+
+  @property
+  def state(self):
+    return self._state
+
+  def cancel(self):
+    self._state = PipelineState.CANCELLED
+
+
+class ElementStreamTest(unittest.TestCase):
+  def setUp(self):
+    ie.new_env()
+
+    self.cache = InMemoryCache()
+    self.p = beam.Pipeline()
+    self.pcoll = self.p | beam.Create([])
+    self.cache_key = str(pi.CacheKey('pcoll', '', '', ''))
+
+    # Create a MockPipelineResult to control the state of a fake run of the
+    # pipeline.
+    self.mock_result = MockPipelineResult()
+    ie.current_env().track_user_pipelines()
+    ie.current_env().set_pipeline_result(self.p, self.mock_result)
+    ie.current_env().set_cache_manager(self.cache, self.p)
+
+  def test_read(self):
+    """Test reading and if a stream is done no more elements are returned."""
+
+    self.mock_result.set_state(PipelineState.DONE)
+    self.cache.write(['expected'], 'full', self.cache_key)
+    self.cache.save_pcoder(None, 'full', self.cache_key)
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=1, max_duration_secs=1)
+
+    self.assertFalse(stream.is_done())
+    self.assertEqual(list(stream.read())[0], 'expected')
+    self.assertTrue(stream.is_done())
+
+  def test_done_if_terminated(self):
+    """Test that terminating the job sets the stream as done."""
+
+    self.cache.write(['expected'], 'full', self.cache_key)
+    self.cache.save_pcoder(None, 'full', self.cache_key)
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=100, max_duration_secs=10)
+
+    self.assertFalse(stream.is_done())
+    self.assertEqual(list(stream.read(tail=False))[0], 'expected')
+
+    # The limiters were not reached, so the stream is not done yet.
+    self.assertFalse(stream.is_done())
+
+    self.mock_result.set_state(PipelineState.DONE)
+    self.assertEqual(list(stream.read(tail=False))[0], 'expected')
+
+    # The underlying pipeline is terminated, so the stream won't yield new
+    # elements.
+    self.assertTrue(stream.is_done())
+
+  def test_read_n(self):
+    """Test that the stream only reads 'n' elements."""
+
+    self.mock_result.set_state(PipelineState.DONE)
+    self.cache.write(list(range(5)), 'full', self.cache_key)
+    self.cache.save_pcoder(None, 'full', self.cache_key)
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=1, max_duration_secs=1)
+    self.assertEqual(list(stream.read()), [0])
+    self.assertTrue(stream.is_done())
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=2, max_duration_secs=1)
+    self.assertEqual(list(stream.read()), [0, 1])
+    self.assertTrue(stream.is_done())
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=5, max_duration_secs=1)
+    self.assertEqual(list(stream.read()), list(range(5)))
+    self.assertTrue(stream.is_done())
+
+    # Test that asking for more elements than the cache holds still returns
+    # all of the cached elements.
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=10, max_duration_secs=1)
+    self.assertEqual(list(stream.read()), list(range(5)))
+    self.assertTrue(stream.is_done())
+
+  def test_read_duration(self):
+    """Test that the stream only reads a 'duration' of elements."""
+
+    values = (FileRecordsBuilder(tag=self.cache_key)
+              .advance_processing_time(1)
+              .add_element(element=0, event_time_secs=0)
+              .advance_processing_time(1)
+              .add_element(element=1, event_time_secs=1)
+              .advance_processing_time(1)
+              .add_element(element=2, event_time_secs=3)
+              .advance_processing_time(1)
+              .add_element(element=3, event_time_secs=4)
+              .advance_processing_time(1)
+              .add_element(element=4, event_time_secs=5)
+              .build()) # yapf: disable
+
+    self.mock_result.set_state(PipelineState.DONE)
+    self.cache.write(values, 'full', self.cache_key)
+    self.cache.save_pcoder(None, 'full', self.cache_key)
+
+    # The elements read from the cache are TestStreamFileRecord instances and
+    # have the underlying elements encoded. This method decodes the elements
+    # from the TestStreamFileRecord.
+    def get_elements(events):
+      coder = coders.FastPrimitivesCoder()
+      elements = []
+      for e in events:
+        if not isinstance(e, TestStreamFileRecord):
+          continue
+
+        if e.recorded_event.element_event:
+          elements += ([
+              coder.decode(el.encoded_element)
+              for el in e.recorded_event.element_event.elements
+          ])
+      return elements
+
+    # The following tests a progression of reading different durations from the
+    # cache.
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=100, max_duration_secs=1)
+    self.assertSequenceEqual(get_elements(stream.read()), [0])
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=100, max_duration_secs=2)
+    self.assertSequenceEqual(get_elements(stream.read()), [0, 1])
+
+    stream = ElementStream(
+        self.pcoll, '', self.cache_key, max_n=100, max_duration_secs=10)
+    self.assertSequenceEqual(get_elements(stream.read()), [0, 1, 2, 3, 4])
+
+
+class RecordingTest(unittest.TestCase):
+  def setUp(self):
+    ie.new_env()
+
+  @unittest.skipIf(
+      sys.version_info < (3, 6, 0),
+      'This test requires at least Python 3.6 to work.')
+  def test_computed(self):
+    """Tests that a PCollection is marked as computed only in a complete state.
+
+    Because the background caching job is now long-lived, repeated runs of a
+    PipelineFragment may yield different results for the same PCollection.
+    """
+
+    p = beam.Pipeline(InteractiveRunner())
+    elems = p | beam.Create([0, 1, 2])
+
+    ib.watch(locals())
+
+    # Create a MockPipelineResult to control the state of a fake run of the
+    # pipeline.
+    mock_result = MockPipelineResult()
+    ie.current_env().track_user_pipelines()
+    ie.current_env().set_pipeline_result(p, mock_result)
+
+    # Create a mock BackgroundCachingJob that will control whether to set the
+    # PCollections as computed or not.
+    bcj_mock_result = MockPipelineResult()
+    background_caching_job = bcj.BackgroundCachingJob(bcj_mock_result, [])
+
+    # Create a recording.
+    recording = Recording(
+        p, [elems],
+        mock_result,
+        pi.PipelineInstrument(p),
+        max_n=10,
+        max_duration_secs=60)
+
+    # Neither the background caching job nor the recording is done yet, so
+    # there may be more elements to be recorded.
+    self.assertFalse(recording.is_computed())
+    self.assertFalse(recording.computed())
+    self.assertTrue(recording.uncomputed())
+
+    # The recording is finished but the background caching job is not. There
+    # may still be more elements to record, or the intermediate PCollection may
+    # have stopped caching in an incomplete state, e.g. before a window could
+    # fire.
+    mock_result.set_state(PipelineState.DONE)
+    recording.wait_until_finish()
+
+    self.assertFalse(recording.is_computed())
+    self.assertFalse(recording.computed())
+    self.assertTrue(recording.uncomputed())
+
+    # The background caching job finished before we started a recording, which
+    # is a sure signal that there will be no more elements.
+    bcj_mock_result.set_state(PipelineState.DONE)
+    ie.current_env().set_background_caching_job(p, background_caching_job)
+    recording = Recording(
+        p, [elems],
+        mock_result,
+        pi.PipelineInstrument(p),
+        max_n=10,
+        max_duration_secs=60)
+    recording.wait_until_finish()
+
+    # There are no more elements and the recording finished, meaning that the
+    # intermediate PCollections are in a complete state. They can now be marked
+    # as computed.
+    self.assertTrue(recording.is_computed())
+    self.assertTrue(recording.computed())
+    self.assertFalse(recording.uncomputed())
+
+
+class RecordingManagerTest(unittest.TestCase):
+  def setUp(self):
+    ie.new_env()
+
+  @unittest.skipIf(
+      sys.version_info < (3, 6, 0),
+      'This test requires at least Python 3.6 to work.')
+  def test_basic_wordcount(self):

Review comment:
       I'm a little confused about this test. Where does the pipeline run here?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org