Posted to commits@beam.apache.org by ro...@apache.org on 2019/10/15 22:33:44 UTC

[beam] branch master updated: Clean up a few places where we pass arbitrary kwargs to PTransform constructor.

This is an automated email from the ASF dual-hosted git repository.

robertwb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 3556b67  Clean up a few places where we pass arbitrary kwargs to PTransform constructor.
     new e4ca3e1  Merge pull request #9674 from tvalentyn/ptransform_cleanup
3556b67 is described below

commit 3556b677d33911ed0c5e67f8f2bc73169275c3ee
Author: Valentyn Tymofieiev <va...@google.com>
AuthorDate: Thu Sep 26 18:05:33 2019 -0700

    Clean up a few places where we pass arbitrary kwargs to PTransform constructor.
---
 .../apache_beam/examples/snippets/snippets.py       |  4 ++--
 sdks/python/apache_beam/io/tfrecordio.py            | 21 ++++++---------------
 2 files changed, 8 insertions(+), 17 deletions(-)

diff --git a/sdks/python/apache_beam/examples/snippets/snippets.py b/sdks/python/apache_beam/examples/snippets/snippets.py
index 510e814..3455e86 100644
--- a/sdks/python/apache_beam/examples/snippets/snippets.py
+++ b/sdks/python/apache_beam/examples/snippets/snippets.py
@@ -910,9 +910,9 @@ class SimpleKVWriter(iobase.Writer):
 # [START model_custom_sink_new_ptransform]
 class WriteToKVSink(PTransform):
 
-  def __init__(self, simplekv, url, final_table_name, **kwargs):
+  def __init__(self, simplekv, url, final_table_name):
     self._simplekv = simplekv
-    super(WriteToKVSink, self).__init__(**kwargs)
+    super(WriteToKVSink, self).__init__()
     self._url = url
     self._final_table_name = final_table_name
 
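Note on the snippets change above: with **kwargs no longer forwarded to PTransform.__init__, a transform name is attached with the >> operator at application time rather than passed as a label keyword argument through the constructor. A minimal usage sketch, not part of the commit; the pipeline, the simplekv stub, the URL, and the table name below are placeholders for illustration:

    # Sketch only: assumes a simplekv object like the one used in snippets.py.
    import apache_beam as beam

    with beam.Pipeline() as p:
      kvs = p | 'CreateKVs' >> beam.Create([('k1', 'v1'), ('k2', 'v2')])
      # The name 'WriteToSimpleKV' is applied via >>, not via **kwargs.
      kvs | 'WriteToSimpleKV' >> WriteToKVSink(
          simplekv, 'http://url_to_simple_kv/', 'final_table')
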
diff --git a/sdks/python/apache_beam/io/tfrecordio.py b/sdks/python/apache_beam/io/tfrecordio.py
index a07878e..7b0bd87 100644
--- a/sdks/python/apache_beam/io/tfrecordio.py
+++ b/sdks/python/apache_beam/io/tfrecordio.py
@@ -205,8 +205,7 @@ class ReadAllFromTFRecord(PTransform):
   def __init__(
       self,
       coder=coders.BytesCoder(),
-      compression_type=CompressionTypes.AUTO,
-      **kwargs):
+      compression_type=CompressionTypes.AUTO):
     """Initialize the ``ReadAllFromTFRecord`` transform.
 
     Args:
@@ -214,10 +213,8 @@ class ReadAllFromTFRecord(PTransform):
       compression_type: Used to handle compressed input files. Default value
           is CompressionTypes.AUTO, in which case the file_path's extension will
           be used to detect the compression.
-      **kwargs: optional args dictionary. These are passed through to parent
-        constructor.
     """
-    super(ReadAllFromTFRecord, self).__init__(**kwargs)
+    super(ReadAllFromTFRecord, self).__init__()
     source_from_file = partial(
         _create_tfrecordio_source, compression_type=compression_type,
         coder=coder)
@@ -239,8 +236,7 @@ class ReadFromTFRecord(PTransform):
                file_pattern,
                coder=coders.BytesCoder(),
                compression_type=CompressionTypes.AUTO,
-               validate=True,
-               **kwargs):
+               validate=True):
     """Initialize a ReadFromTFRecord transform.
 
     Args:
@@ -251,13 +247,11 @@ class ReadFromTFRecord(PTransform):
           be used to detect the compression.
       validate: Boolean flag to verify that the files exist during the pipeline
           creation time.
-      **kwargs: optional args dictionary. These are passed through to parent
-        constructor.
 
     Returns:
       A ReadFromTFRecord transform object.
     """
-    super(ReadFromTFRecord, self).__init__(**kwargs)
+    super(ReadFromTFRecord, self).__init__()
     self._source = _TFRecordSource(file_pattern, coder, compression_type,
                                    validate)
 
@@ -298,8 +292,7 @@ class WriteToTFRecord(PTransform):
                file_name_suffix='',
                num_shards=0,
                shard_name_template=None,
-               compression_type=CompressionTypes.AUTO,
-               **kwargs):
+               compression_type=CompressionTypes.AUTO):
     """Initialize WriteToTFRecord transform.
 
     Args:
@@ -320,13 +313,11 @@ class WriteToTFRecord(PTransform):
       compression_type: Used to handle compressed output files. Typical value
           is CompressionTypes.AUTO, in which case the file_path's extension will
           be used to detect the compression.
-      **kwargs: Optional args dictionary. These are passed through to parent
-        constructor.
 
     Returns:
       A WriteToTFRecord transform object.
     """
-    super(WriteToTFRecord, self).__init__(**kwargs)
+    super(WriteToTFRecord, self).__init__()
     self._sink = _TFRecordSink(file_path_prefix, coder, file_name_suffix,
                                num_shards, shard_name_template,
                                compression_type)