Posted to commits@beam.apache.org by ib...@apache.org on 2019/10/18 14:52:45 UTC

[beam] 01/02: [BEAM-8431] move job_name to StandardOptions

This is an automated email from the ASF dual-hosted git repository.

ibzib pushed a commit to branch job_name
in repository https://gitbox.apache.org/repos/asf/beam.git

commit cc15863750ca555353d72fffd57881fb69a584cd
Author: Kyle Weaver <kc...@google.com>
AuthorDate: Fri Oct 18 16:45:59 2019 +0200

    [BEAM-8431] move job_name to StandardOptions
---
 sdks/python/apache_beam/options/pipeline_options.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/sdks/python/apache_beam/options/pipeline_options.py b/sdks/python/apache_beam/options/pipeline_options.py
index 5842f7e..a5d8ca7 100644
--- a/sdks/python/apache_beam/options/pipeline_options.py
+++ b/sdks/python/apache_beam/options/pipeline_options.py
@@ -363,6 +363,9 @@ class StandardOptions(PipelineOptions):
                         default=False,
                         action='store_true',
                         help='Whether to enable streaming mode.')
+    parser.add_argument('--job_name',
+                        default=None,
+                        help='Name of the job. Usage depends on the runner.')
 
 
 class TypeOptions(PipelineOptions):
@@ -421,6 +424,10 @@ class GoogleCloudOptions(PipelineOptions):
   STORAGE_API_SERVICE = 'storage.googleapis.com'
   DATAFLOW_ENDPOINT = 'https://dataflow.googleapis.com'
 
+  job_name = property(
+      lambda self: self.view_as(StandardOptions).job_name,
+      lambda self, job_name: setattr(self.view_as(StandardOptions), 'job_name', job_name))
+
   @classmethod
   def _add_argparse_args(cls, parser):
     parser.add_argument(
@@ -435,10 +442,6 @@ class GoogleCloudOptions(PipelineOptions):
                         help='Name of the Cloud project owning the Dataflow '
                         'job.')
     # Remote execution must check that this option is not None.
-    parser.add_argument('--job_name',
-                        default=None,
-                        help='Name of the Cloud Dataflow job.')
-    # Remote execution must check that this option is not None.
     parser.add_argument('--staging_location',
                         default=None,
                         help='GCS path for staging code packages needed by '
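A minimal sketch of the intended behavior after this change (illustrative
only, not part of the commit): the --job_name flag is now parsed by
StandardOptions, while the property on GoogleCloudOptions delegates to it
so existing Dataflow-oriented code keeps working.

    from apache_beam.options.pipeline_options import (
        GoogleCloudOptions, PipelineOptions, StandardOptions)

    # The flag is now registered on StandardOptions.
    options = PipelineOptions(['--job_name', 'my-job'])
    assert options.view_as(StandardOptions).job_name == 'my-job'

    # Reads through GoogleCloudOptions still work: the property getter
    # forwards to the StandardOptions view of the same options object.
    gcp = options.view_as(GoogleCloudOptions)
    assert gcp.job_name == 'my-job'

    # Writes are forwarded too, via setattr in the property setter.
    gcp.job_name = 'renamed-job'
    assert options.view_as(StandardOptions).job_name == 'renamed-job'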