Posted to commits@airflow.apache.org by po...@apache.org on 2021/12/16 12:44:40 UTC

[airflow] branch main updated: Fix MyPy Errors for dataproc package (#20327)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1746819  Fix MyPy Errors for dataproc package (#20327)
1746819 is described below

commit 174681911f96f17d41a4f560ca08d5e200944f7f
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Dec 16 13:44:13 2021 +0100

    Fix MyPy Errors for dataproc package (#20327)
    
    
    Part of #19891
---
 airflow/providers/google/cloud/hooks/dataproc.py   | 53 +++++++++++-----------
 .../google/cloud/hooks/dataproc_metastore.py       | 26 +++++------
 .../providers/google/cloud/operators/dataproc.py   | 30 ++++++------
 .../google/cloud/operators/dataproc_metastore.py   | 34 +++++++-------
 setup.cfg                                          | 15 ++++++
 .../providers/google/cloud/hooks/test_dataproc.py  | 36 +++++++--------
 .../google/cloud/hooks/test_dataproc_metastore.py  | 36 +++++++--------
 7 files changed, 122 insertions(+), 108 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py
index 1ac35c0..282b6f9 100644
--- a/airflow/providers/google/cloud/hooks/dataproc.py
+++ b/airflow/providers/google/cloud/hooks/dataproc.py
@@ -309,7 +309,7 @@ class DataprocHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Creates a cluster in a project.
@@ -376,7 +376,7 @@ class DataprocHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Deletes a cluster in a project.
@@ -426,7 +426,7 @@ class DataprocHook(GoogleBaseHook):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Gets cluster diagnostic information. After the operation completes GCS uri to
@@ -466,7 +466,7 @@ class DataprocHook(GoogleBaseHook):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Gets the resource representation for a cluster in a project.
@@ -504,7 +504,7 @@ class DataprocHook(GoogleBaseHook):
         page_size: Optional[int] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Lists all regions/{region}/clusters in a project.
@@ -544,13 +544,13 @@ class DataprocHook(GoogleBaseHook):
         cluster: Union[Dict, Cluster],
         update_mask: Union[Dict, FieldMask],
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         graceful_decommission_timeout: Optional[Union[Dict, Duration]] = None,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Updates a cluster in a project.
@@ -645,11 +645,11 @@ class DataprocHook(GoogleBaseHook):
         self,
         template: Union[Dict, WorkflowTemplate],
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ) -> WorkflowTemplate:
         """
         Creates new workflow template.
@@ -695,14 +695,14 @@ class DataprocHook(GoogleBaseHook):
         self,
         template_name: str,
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         version: Optional[int] = None,
         request_id: Optional[str] = None,
         parameters: Optional[Dict[str, str]] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Instantiates a template and begins execution.
@@ -764,12 +764,12 @@ class DataprocHook(GoogleBaseHook):
         self,
         template: Union[Dict, WorkflowTemplate],
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Instantiates a template and begins execution.
@@ -824,7 +824,7 @@ class DataprocHook(GoogleBaseHook):
         job_id: str,
         project_id: str,
         wait_time: int = 10,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         timeout: Optional[int] = None,
     ) -> None:
@@ -877,11 +877,11 @@ class DataprocHook(GoogleBaseHook):
         self,
         job_id: str,
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ) -> Job:
         """
         Gets the resource representation for a job in a project.
@@ -928,12 +928,12 @@ class DataprocHook(GoogleBaseHook):
         self,
         job: Union[dict, Job],
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ) -> Job:
         """
         Submits a job to a cluster.
@@ -1013,7 +1013,7 @@ class DataprocHook(GoogleBaseHook):
         location: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ) -> Job:
         """
         Starts a job cancellation request.
@@ -1072,7 +1072,7 @@ class DataprocHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = "",
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Creates a batch workload.
@@ -1124,8 +1124,8 @@ class DataprocHook(GoogleBaseHook):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
-    ):
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
         """
         Deletes the batch workload resource.
 
@@ -1149,7 +1149,7 @@ class DataprocHook(GoogleBaseHook):
         client = self.get_batch_client(region)
         name = f"projects/{project_id}/regions/{region}/batches/{batch_id}"
 
-        result = client.delete_batch(
+        client.delete_batch(
             request={
                 'name': name,
             },
@@ -1157,7 +1157,6 @@ class DataprocHook(GoogleBaseHook):
             timeout=timeout,
             metadata=metadata,
         )
-        return result
 
     @GoogleBaseHook.fallback_to_default_project_id
     def get_batch(
@@ -1167,8 +1166,8 @@ class DataprocHook(GoogleBaseHook):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
-    ):
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> Batch:
         """
         Gets the batch workload resource representation.
 
@@ -1211,7 +1210,7 @@ class DataprocHook(GoogleBaseHook):
         page_token: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Lists batch workloads.
diff --git a/airflow/providers/google/cloud/hooks/dataproc_metastore.py b/airflow/providers/google/cloud/hooks/dataproc_metastore.py
index 327c98b..ddb72ca 100644
--- a/airflow/providers/google/cloud/hooks/dataproc_metastore.py
+++ b/airflow/providers/google/cloud/hooks/dataproc_metastore.py
@@ -18,7 +18,7 @@
 #
 """This module contains a Google Cloud Dataproc Metastore hook."""
 
-from typing import Dict, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, Optional, Sequence, Tuple, Union
 
 from google.api_core.operation import Operation
 from google.api_core.retry import Retry
@@ -42,7 +42,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
             credentials=self._get_credentials(), client_info=self.client_info, client_options=client_options
         )
 
-    def wait_for_operation(self, timeout: float, operation: Operation):
+    def wait_for_operation(self, timeout: Optional[float], operation: Operation):
         """Waits for long-lasting operation to complete."""
         try:
             return operation.result(timeout=timeout)
@@ -56,12 +56,12 @@ class DataprocMetastoreHook(GoogleBaseHook):
         project_id: str,
         region: str,
         service_id: str,
-        backup: Backup,
+        backup: Union[Dict[Any, Any], Backup],
         backup_id: str,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Creates a new backup in a given project and location.
@@ -127,7 +127,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Creates a new MetadataImport in a given project and location.
@@ -193,7 +193,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Creates a metastore service in a project and location.
@@ -251,7 +251,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Deletes a single backup.
@@ -307,7 +307,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Deletes a single service.
@@ -358,7 +358,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         database_dump_type: Optional[DatabaseDumpSpec] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Exports metadata from a service.
@@ -416,7 +416,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         service_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Gets the details of a single service.
@@ -512,7 +512,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         order_by: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Lists backups in a service.
@@ -590,7 +590,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Restores a service from a backup.
@@ -663,7 +663,7 @@ class DataprocMetastoreHook(GoogleBaseHook):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
     ):
         """
         Updates the parameters of a single service.
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index a0b2453..aed1b57 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -533,7 +533,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         use_if_exists: bool = True,
         retry: Optional[Retry] = None,
         timeout: float = 1 * 60 * 60,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -896,7 +896,7 @@ class DataprocDeleteClusterOperator(BaseOperator):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -1656,11 +1656,11 @@ class DataprocCreateWorkflowTemplateOperator(BaseOperator):
         *,
         template: Dict,
         project_id: str,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -1765,7 +1765,7 @@ class DataprocInstantiateWorkflowTemplateOperator(BaseOperator):
         parameters: Optional[Dict[str, str]] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -1862,7 +1862,7 @@ class DataprocInstantiateInlineWorkflowTemplateOperator(BaseOperator):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -1953,12 +1953,12 @@ class DataprocSubmitJobOperator(BaseOperator):
         *,
         project_id: str,
         job: Dict,
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         asynchronous: bool = False,
@@ -2097,13 +2097,13 @@ class DataprocUpdateClusterOperator(BaseOperator):
         cluster: Union[Dict, Cluster],
         update_mask: Union[Dict, FieldMask],
         graceful_decommission_timeout: Union[Dict, Duration],
-        region: str = None,
+        region: Optional[str] = None,
         location: Optional[str] = None,
         request_id: Optional[str] = None,
         project_id: Optional[str] = None,
         retry: Retry = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -2211,14 +2211,14 @@ class DataprocCreateBatchOperator(BaseOperator):
     def __init__(
         self,
         *,
-        region: str = None,
+        region: Optional[str] = None,
         project_id: str,
         batch: Union[Dict, Batch],
         batch_id: Optional[str] = None,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = "",
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -2312,7 +2312,7 @@ class DataprocDeleteBatchOperator(BaseOperator):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = "",
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -2384,7 +2384,7 @@ class DataprocGetBatchOperator(BaseOperator):
         project_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = "",
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -2461,7 +2461,7 @@ class DataprocListBatchesOperator(BaseOperator):
         page_token: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = "",
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/airflow/providers/google/cloud/operators/dataproc_metastore.py
index 31379f0..2f81fc5 100644
--- a/airflow/providers/google/cloud/operators/dataproc_metastore.py
+++ b/airflow/providers/google/cloud/operators/dataproc_metastore.py
@@ -19,7 +19,7 @@
 """This module contains Google Dataproc Metastore operators."""
 
 from time import sleep
-from typing import Collection, Dict, Optional, Sequence, Tuple, Union
+from typing import Dict, List, Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry, exponential_sleep_generator
 from google.cloud.metastore_v1 import MetadataExport, MetadataManagementActivity
@@ -101,7 +101,7 @@ class DataprocMetastoreCreateBackupOperator(BaseOperator):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -218,12 +218,12 @@ class DataprocMetastoreCreateMetadataImportOperator(BaseOperator):
         project_id: str,
         region: str,
         service_id: str,
-        metadata_import: Union[MetadataImport, Dict[str, Collection[str]]],
+        metadata_import: MetadataImport,
         metadata_import_id: str,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -317,12 +317,12 @@ class DataprocMetastoreCreateServiceOperator(BaseOperator):
         *,
         region: str,
         project_id: str,
-        service: Optional[Union[Dict, Service]] = None,
+        service: Union[Dict, Service],
         service_id: str,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -431,7 +431,7 @@ class DataprocMetastoreDeleteBackupOperator(BaseOperator):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -499,7 +499,7 @@ class DataprocMetastoreDeleteServiceOperator(BaseOperator):
         service_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -514,7 +514,7 @@ class DataprocMetastoreDeleteServiceOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
-    def execute(self, context) -> dict:
+    def execute(self, context):
         hook = DataprocMetastoreHook(
             gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
         )
@@ -588,7 +588,7 @@ class DataprocMetastoreExportMetadataOperator(BaseOperator):
         database_dump_type: Optional[DatabaseDumpSpec] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -699,7 +699,7 @@ class DataprocMetastoreGetServiceOperator(BaseOperator):
         service_id: str,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -782,7 +782,7 @@ class DataprocMetastoreListBackupsOperator(BaseOperator):
         order_by: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -801,7 +801,7 @@ class DataprocMetastoreListBackupsOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
-    def execute(self, context: dict) -> dict:
+    def execute(self, context: dict) -> List[dict]:
         hook = DataprocMetastoreHook(
             gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
         )
@@ -893,7 +893,7 @@ class DataprocMetastoreRestoreServiceOperator(BaseOperator):
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
@@ -914,7 +914,7 @@ class DataprocMetastoreRestoreServiceOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
-    def execute(self, context) -> dict:
+    def execute(self, context):
         hook = DataprocMetastoreHook(
             gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
         )
@@ -1025,11 +1025,11 @@ class DataprocMetastoreUpdateServiceOperator(BaseOperator):
         region: str,
         service_id: str,
         service: Union[Dict, Service],
-        update_mask: Union[Dict, FieldMask],
+        update_mask: FieldMask,
         request_id: Optional[str] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        metadata: Optional[Sequence[Tuple[str, str]]] = (),
+        metadata: Sequence[Tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
         **kwargs,
diff --git a/setup.cfg b/setup.cfg
index a950f4f..adba25a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -209,6 +209,21 @@ no_implicit_optional = False
 [mypy-google.cloud.automl_v1beta1.services.auto_ml.*]
 no_implicit_optional = False
 
+[mypy-google.cloud.metastore_v1.services.dataproc_metastore.*]
+no_implicit_optional = False
+
+[mypy-google.cloud.dataproc_v1.services.workflow_template_service.*]
+no_implicit_optional = False
+
+[mypy-google.cloud.dataproc_v1.services.job_controller.*]
+no_implicit_optional = False
+
+[mypy-google.cloud.dataproc_v1.services.batch_controller.*]
+no_implicit_optional = False
+
+[mypy-google.cloud.dataproc_v1.services.cluster_controller.*]
+no_implicit_optional = False
+
 [isort]
 line_length=110
 combine_as_imports = true
diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py
index 598bb91..76714b4 100644
--- a/tests/providers/google/cloud/hooks/test_dataproc.py
+++ b/tests/providers/google/cloud/hooks/test_dataproc.py
@@ -241,7 +241,7 @@ class TestDataprocHook(unittest.TestCase):
                 cluster=CLUSTER,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -258,7 +258,7 @@ class TestDataprocHook(unittest.TestCase):
                 cluster_uuid=None,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -273,7 +273,7 @@ class TestDataprocHook(unittest.TestCase):
                 region=GCP_LOCATION,
                 cluster_name=CLUSTER_NAME,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -289,7 +289,7 @@ class TestDataprocHook(unittest.TestCase):
                 region=GCP_LOCATION,
                 cluster_name=CLUSTER_NAME,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -307,7 +307,7 @@ class TestDataprocHook(unittest.TestCase):
                 filter=filter_,
                 page_size=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -333,7 +333,7 @@ class TestDataprocHook(unittest.TestCase):
                 graceful_decommission_timeout=None,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -364,7 +364,7 @@ class TestDataprocHook(unittest.TestCase):
                     graceful_decommission_timeout=None,
                     request_id=None,
                 ),
-                metadata=None,
+                metadata=(),
                 retry=None,
                 timeout=None,
             )
@@ -532,7 +532,7 @@ class TestDataprocHook(unittest.TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
@@ -552,7 +552,7 @@ class TestDataprocHook(unittest.TestCase):
                 ),
                 retry=None,
                 timeout=None,
-                metadata=None,
+                metadata=(),
             )
             assert warning_message == str(warnings[0].message)
 
@@ -572,7 +572,7 @@ class TestDataprocHook(unittest.TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
@@ -593,7 +593,7 @@ class TestDataprocHook(unittest.TestCase):
                 ),
                 retry=None,
                 timeout=None,
-                metadata=None,
+                metadata=(),
             )
             assert warning_message == str(warnings[0].message)
         with pytest.raises(TypeError):
@@ -620,7 +620,7 @@ class TestDataprocHook(unittest.TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
@@ -636,7 +636,7 @@ class TestDataprocHook(unittest.TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
@@ -656,7 +656,7 @@ class TestDataprocHook(unittest.TestCase):
                 ),
                 retry=None,
                 timeout=None,
-                metadata=None,
+                metadata=(),
             )
             assert warning_message == str(warnings[0].message)
 
@@ -676,7 +676,7 @@ class TestDataprocHook(unittest.TestCase):
                 batch_id=BATCH_ID,
                 request_id=None,
             ),
-            metadata="",
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -693,7 +693,7 @@ class TestDataprocHook(unittest.TestCase):
             request=dict(
                 name=BATCH_NAME.format(GCP_PROJECT, GCP_LOCATION, BATCH_ID),
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -710,7 +710,7 @@ class TestDataprocHook(unittest.TestCase):
             request=dict(
                 name=BATCH_NAME.format(GCP_PROJECT, GCP_LOCATION, BATCH_ID),
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -728,7 +728,7 @@ class TestDataprocHook(unittest.TestCase):
                 page_size=None,
                 page_token=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
diff --git a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py b/tests/providers/google/cloud/hooks/test_dataproc_metastore.py
index 1d86296..37e6b3b 100644
--- a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py
+++ b/tests/providers/google/cloud/hooks/test_dataproc_metastore.py
@@ -81,7 +81,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
                 backup_id=TEST_BACKUP_ID,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -103,7 +103,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
                 metadata_import_id=TEST_METADATA_IMPORT_ID,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -161,7 +161,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
                 name=TEST_NAME_BACKUPS.format(TEST_PROJECT_ID, TEST_REGION, TEST_SERVICE_ID, TEST_BACKUP_ID),
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -181,7 +181,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_METASTORE_STRING.format("DataprocMetastoreHook.get_dataproc_metastore_client"))
@@ -202,7 +202,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_METASTORE_STRING.format("DataprocMetastoreHook.get_dataproc_metastore_client"))
@@ -217,7 +217,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
             request=dict(
                 name=TEST_PARENT_SERVICES.format(TEST_PROJECT_ID, TEST_REGION, TEST_SERVICE_ID),
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -238,7 +238,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
                 filter=None,
                 order_by=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -264,7 +264,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
                 restore_type=None,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -287,7 +287,7 @@ class TestDataprocMetastoreWithDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
 
@@ -315,7 +315,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
                 backup_id=TEST_BACKUP_ID,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -337,7 +337,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
                 metadata_import_id=TEST_METADATA_IMPORT_ID,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -377,7 +377,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
                 name=TEST_NAME_BACKUPS.format(TEST_PROJECT_ID, TEST_REGION, TEST_SERVICE_ID, TEST_BACKUP_ID),
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -397,7 +397,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_METASTORE_STRING.format("DataprocMetastoreHook.get_dataproc_metastore_client"))
@@ -418,7 +418,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_METASTORE_STRING.format("DataprocMetastoreHook.get_dataproc_metastore_client"))
@@ -433,7 +433,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
             request=dict(
                 name=TEST_PARENT_SERVICES.format(TEST_PROJECT_ID, TEST_REGION, TEST_SERVICE_ID),
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -454,7 +454,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
                 filter=None,
                 order_by=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -480,7 +480,7 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
                 restore_type=None,
                 request_id=None,
             ),
-            metadata=None,
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -503,5 +503,5 @@ class TestDataprocMetastoreWithoutDefaultProjectIdHook(TestCase):
             ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
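
For context, the recurring fix in this diff is twofold: `metadata` parameters stop being declared as `Optional[Sequence[Tuple[str, str]]] = None` and instead default to an empty tuple, and implicitly-Optional parameters such as `region: str = None` are spelled out as `Optional[str] = None`. Below is a minimal, self-contained sketch of that before/after pattern; the function and parameter names are illustrative only and do not come from the Airflow codebase.

    from typing import Optional, Sequence, Tuple

    # Before: strict MyPy (e.g. with no_implicit_optional) flags both parameters.
    # - `region: str = None` is an implicit Optional, which strict MyPy rejects.
    # - `metadata` defaults to None even though callers iterate over it as a sequence.
    def submit_example_old(
        region: str = None,  # MyPy error: implicit Optional
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
    ) -> None:
        for key, value in metadata or ():  # extra guard needed because of the None default
            print(region, key, value)

    # After: the pattern applied throughout this commit.
    # - Optional is written explicitly where None really is an accepted value.
    # - `metadata` defaults to an empty tuple, so it is always iterable and matches
    #   Sequence[Tuple[str, str]] without an Optional wrapper.
    def submit_example_new(
        region: Optional[str] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        for key, value in metadata:
            print(region, key, value)

    if __name__ == "__main__":
        submit_example_new(metadata=(("x-goog-request-params", "region=europe-west1"),))

The empty-tuple default also explains the test changes above, where expected calls now assert `metadata=()` rather than `metadata=None`.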