Posted to github@beam.apache.org by GitBox <gi...@apache.org> on 2020/05/28 21:47:18 UTC

[GitHub] [beam] ihji commented on a change in pull request #11847: [BEAM-10125] adding cross-language KafkaIO integration test

ihji commented on a change in pull request #11847:
URL: https://github.com/apache/beam/pull/11847#discussion_r432142695



##########
File path: sdks/python/apache_beam/io/external/xlang_kafkaio_it_test.py
##########
@@ -0,0 +1,145 @@
+"""Integration test for Python cross-language pipelines for Java KafkaIO."""
+
+from __future__ import absolute_import
+
+import contextlib
+import logging
+import os
+import socket
+import subprocess
+import time
+import typing
+import unittest
+
+import grpc
+
+import apache_beam as beam
+from apache_beam.io.external.kafka import ReadFromKafka
+from apache_beam.io.external.kafka import WriteToKafka
+from apache_beam.metrics import Metrics
+from apache_beam.options.pipeline_options import PipelineOptions
+from apache_beam.testing.test_pipeline import TestPipeline
+
+
+class CrossLanguageKafkaIO(object):
+  def __init__(self, bootstrap_servers, topic, expansion_service=None):
+    self.bootstrap_servers = bootstrap_servers
+    self.topic = topic
+    self.expansion_service = expansion_service or (
+        'localhost:%s' % os.environ.get('EXPANSION_PORT'))
+    self.sum_counter = Metrics.counter('source', 'elements_sum')
+
+  def build_write_pipeline(self, pipeline):
+    _ = (
+        pipeline
+        | 'Impulse' >> beam.Impulse()
+        | 'Generate' >> beam.FlatMap(lambda x: range(1000)) # pylint: disable=range-builtin-not-iterating
+        | 'Reshuffle' >> beam.Reshuffle()
+        | 'MakeKV' >> beam.Map(lambda x:
+                               (b'', str(x).encode())).with_output_types(
+                                   typing.Tuple[bytes, bytes])
+        | 'WriteToKafka' >> WriteToKafka(
+            producer_config={'bootstrap.servers': self.bootstrap_servers},
+            topic=self.topic,
+            expansion_service=self.expansion_service))
+
+  def build_read_pipeline(self, pipeline):
+    _ = (
+        pipeline
+        | 'ReadFromKafka' >> ReadFromKafka(
+            consumer_config={
+                'bootstrap.servers': self.bootstrap_servers,
+                'auto.offset.reset': 'earliest'
+            },
+            topics=[self.topic],
+            expansion_service=self.expansion_service)
+        | 'Windowing' >> beam.WindowInto(
+            beam.window.FixedWindows(300),
+            trigger=beam.transforms.trigger.AfterProcessingTime(60),
+            accumulation_mode=beam.transforms.trigger.AccumulationMode.
+            DISCARDING)
+        | 'DecodingValue' >> beam.Map(lambda elem: int(elem[1].decode()))
+        | 'CombineGlobally' >> beam.CombineGlobally(sum).without_defaults()
+        | 'SetSumCounter' >> beam.Map(self.sum_counter.inc))
+
+  def run_xlang_kafkaio(self, pipeline):
+    self.build_write_pipeline(pipeline)
+    self.build_read_pipeline(pipeline)
+    pipeline.run(False)
+
+
+@unittest.skipUnless(
+    os.environ.get('LOCAL_KAFKA_JAR'),
+    "LOCAL_KAFKA_JAR environment var is not provided.")
+@unittest.skipUnless(
+    os.environ.get('EXPANSION_JAR'),
+    "EXPANSION_JAR environment var is not provided.")
+class CrossLanguageKafkaIOTest(unittest.TestCase):
+  def get_open_port(self):
+    s = None
+    try:
+      s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    except:  # pylint: disable=bare-except
+      # Above call will fail for nodes that only support IPv6.
+      s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+    s.bind(('localhost', 0))
+    s.listen(1)
+    port = s.getsockname()[1]
+    s.close()
+    return port
+
+  @contextlib.contextmanager
+  def local_services(self, expansion_service_jar_file, local_kafka_jar_file):
+    expansion_service_port = str(self.get_open_port())
+    kafka_port = str(self.get_open_port())
+    zookeeper_port = str(self.get_open_port())
+
+    expansion_server = None
+    kafka_server = None
+    try:
+      expansion_server = subprocess.Popen(
+          ['java', '-jar', expansion_service_jar_file, expansion_service_port])
+      kafka_server = subprocess.Popen(

Review comment:
       Yes, this is for external testing (probably only for small-scale correctness tests; we may still need a Kubernetes cluster for large-scale performance tests).
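
       For context, the diff above is cut off at the line under review, so the tail of local_services is not visible here. Purely as a hedged illustration of the pattern being discussed (a locally spawned Kafka broker plus a Java expansion service for small-scale correctness testing), a context manager of this shape might wrap the two subprocesses roughly as follows. This is a sketch, not the PR's actual code: the command-line arguments accepted by the local Kafka jar and the gRPC readiness wait are assumptions.

           # Hypothetical sketch only, not the PR's code. The arguments taken by
           # the local Kafka jar are assumed for illustration.
           import contextlib
           import subprocess

           import grpc


           @contextlib.contextmanager
           def local_services(
               expansion_jar, kafka_jar, expansion_port, kafka_port, zookeeper_port):
             expansion_server = None
             kafka_server = None
             try:
               expansion_server = subprocess.Popen(
                   ['java', '-jar', expansion_jar, expansion_port])
               kafka_server = subprocess.Popen(
                   ['java', '-jar', kafka_jar, kafka_port, zookeeper_port])
               # Block until the expansion service accepts gRPC connections so
               # the test body does not race against service startup.
               grpc.channel_ready_future(
                   grpc.insecure_channel('localhost:%s' % expansion_port)).result(
                       timeout=60)
               yield
             finally:
               # Always tear the child processes down, even if the test body raises.
               for server in (expansion_server, kafka_server):
                 if server:
                   server.kill()

       Killing both processes in the finally block keeps a failed run from leaving a stray broker or expansion service bound to the chosen ports.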




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org