Posted to commits@airflow.apache.org by ka...@apache.org on 2020/06/05 14:40:25 UTC

[airflow] branch master updated: Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 9bcdada  Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)
9bcdada is described below

commit 9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Fri Jun 5 15:39:42 2020 +0100

    Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)
---
 airflow/providers/google/cloud/operators/dataproc.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index 0f7233b..8c4a002 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -1242,7 +1242,7 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
     Start a PySpark Job on a Cloud DataProc cluster.
 
     :param main: [Required] The Hadoop Compatible Filesystem (HCFS) URI of the main
-            Python file to use as the driver. Must be a .py file.
+            Python file to use as the driver. Must be a .py file. (templated)
     :type main: str
     :param arguments: Arguments for the job. (templated)
     :type arguments: list
@@ -1256,7 +1256,7 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
     :type pyfiles: list
     """
 
-    template_fields = ['arguments', 'job_name', 'cluster_name',
+    template_fields = ['main', 'arguments', 'job_name', 'cluster_name',
                        'region', 'dataproc_jars', 'dataproc_properties']
     ui_color = '#0273d4'
     job_type = 'pyspark_job'
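
With 'main' added to template_fields, the URI of the PySpark driver can now contain Jinja
expressions that are rendered at task run time, just like 'arguments' already could. A minimal
sketch of how this could be used, where the bucket path, cluster name, region and task id are
purely illustrative placeholders:

    from airflow.providers.google.cloud.operators.dataproc import (
        DataprocSubmitPySparkJobOperator,
    )

    # 'main' is rendered with the task's Jinja context, so e.g. the execution
    # date macro ({{ ds }}) can select a per-day driver script. The bucket,
    # cluster and region below are hypothetical example values.
    submit_pyspark = DataprocSubmitPySparkJobOperator(
        task_id='submit_pyspark',
        main='gs://example-bucket/jobs/{{ ds }}/driver.py',
        arguments=['--run-date', '{{ ds }}'],  # 'arguments' was already templated
        cluster_name='example-cluster',
        region='europe-west1',
    )

Before this change, a templated value passed to 'main' would have been submitted to Dataproc
verbatim, with the Jinja expression left unrendered.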