You are viewing a plain text version of this content. The canonical link for it was a hyperlink in the original message (lost in this plain-text conversion).
Posted to commits@airflow.apache.org by ka...@apache.org on 2020/08/01 23:00:33 UTC

[airflow] branch master updated: Remove `args` parameter from provider operator constructors (#10097)

This is an automated email from the ASF dual-hosted git repository.

kamilbregula pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new aeea712  Remove `args` parameter from provider operator constructors (#10097)
aeea712 is described below

commit aeea71274d4527ff2351102e94aa38bda6099e7f
Author: chipmyersjr <ch...@gmail.com>
AuthorDate: Sat Aug 1 15:59:47 2020 -0700

    Remove `args` parameter from provider operator constructors (#10097)
---
 airflow/providers/amazon/aws/operators/athena.py   |  3 +-
 .../amazon/aws/operators/cloud_formation.py        |  8 +-
 airflow/providers/amazon/aws/operators/datasync.py |  3 +-
 .../amazon/aws/operators/ec2_start_instance.py     |  3 +-
 .../amazon/aws/operators/ec2_stop_instance.py      |  3 +-
 .../amazon/aws/operators/emr_add_steps.py          |  4 +-
 .../amazon/aws/operators/emr_create_job_flow.py    |  4 +-
 .../amazon/aws/operators/emr_modify_cluster.py     |  4 +-
 .../amazon/aws/operators/emr_terminate_job_flow.py |  4 +-
 airflow/providers/amazon/aws/operators/glue.py     |  4 +-
 .../providers/amazon/aws/operators/s3_bucket.py    |  6 +-
 .../amazon/aws/operators/s3_copy_object.py         |  4 +-
 .../amazon/aws/operators/s3_delete_objects.py      |  4 +-
 .../amazon/aws/operators/s3_file_transform.py      |  4 +-
 airflow/providers/amazon/aws/operators/s3_list.py  |  3 +-
 .../amazon/aws/operators/sagemaker_base.py         |  4 +-
 .../amazon/aws/operators/sagemaker_endpoint.py     |  4 +-
 .../aws/operators/sagemaker_endpoint_config.py     |  4 +-
 .../amazon/aws/operators/sagemaker_model.py        |  4 +-
 .../amazon/aws/operators/sagemaker_transform.py    |  4 +-
 .../amazon/aws/operators/sagemaker_tuning.py       |  4 +-
 airflow/providers/amazon/aws/operators/sns.py      |  4 +-
 airflow/providers/amazon/aws/operators/sqs.py      |  3 +-
 .../step_function_get_execution_output.py          |  4 +-
 .../aws/operators/step_function_start_execution.py |  4 +-
 .../cncf/kubernetes/operators/spark_kubernetes.py  |  4 +-
 airflow/providers/dingding/operators/dingding.py   |  3 +-
 airflow/providers/docker/operators/docker.py       |  3 +-
 airflow/providers/docker/operators/docker_swarm.py |  3 +-
 airflow/providers/exasol/operators/exasol.py       |  4 +-
 airflow/providers/google/ads/operators/ads.py      |  3 +-
 airflow/providers/google/cloud/operators/automl.py | 39 ++++------
 .../providers/google/cloud/operators/bigquery.py   | 56 ++++++--------
 .../google/cloud/operators/bigquery_dts.py         |  9 +--
 .../providers/google/cloud/operators/bigtable.py   | 20 ++---
 .../google/cloud/operators/cloud_build.py          |  4 +-
 .../google/cloud/operators/cloud_memorystore.py    | 33 +++-----
 .../operators/cloud_storage_transfer_service.py    | 30 +++-----
 .../google/cloud/operators/datacatalog.py          | 63 +++++----------
 .../providers/google/cloud/operators/dataflow.py   |  9 +--
 .../providers/google/cloud/operators/datafusion.py | 30 +++-----
 .../providers/google/cloud/operators/dataproc.py   | 43 ++++-------
 .../providers/google/cloud/operators/datastore.py  |  6 +-
 airflow/providers/google/cloud/operators/dlp.py    | 90 ++++++++--------------
 .../providers/google/cloud/operators/functions.py  | 11 ++-
 airflow/providers/google/cloud/operators/gcs.py    | 27 +++----
 .../google/cloud/operators/kubernetes_engine.py    |  6 +-
 .../google/cloud/operators/life_sciences.py        |  4 +-
 .../providers/google/cloud/operators/mlengine.py   | 36 +++------
 .../google/cloud/operators/natural_language.py     | 12 +--
 airflow/providers/google/cloud/operators/pubsub.py | 18 ++---
 .../providers/google/cloud/operators/spanner.py    | 24 +++---
 .../google/cloud/operators/speech_to_text.py       |  3 +-
 .../google/cloud/operators/stackdriver.py          | 40 +++++-----
 airflow/providers/google/cloud/operators/tasks.py  | 39 ++++------
 .../google/cloud/operators/text_to_speech.py       |  3 +-
 .../providers/google/cloud/operators/translate.py  |  3 +-
 .../google/cloud/operators/translate_speech.py     |  3 +-
 .../google/cloud/operators/video_intelligence.py   |  9 +--
 airflow/providers/google/cloud/operators/vision.py | 51 ++++--------
 .../google/firebase/operators/firestore.py         |  3 +-
 .../marketing_platform/operators/analytics.py      | 18 ++---
 .../operators/campaign_manager.py                  | 18 ++---
 .../marketing_platform/operators/display_video.py  | 24 ++----
 .../marketing_platform/operators/search_ads.py     |  6 +-
 airflow/providers/google/suite/operators/sheets.py |  3 +-
 airflow/providers/grpc/operators/grpc.py           |  4 +-
 airflow/providers/http/operators/http.py           |  4 +-
 airflow/providers/jdbc/operators/jdbc.py           |  4 +-
 .../jenkins/operators/jenkins_job_trigger.py       |  3 +-
 airflow/providers/jira/operators/jira.py           |  3 +-
 .../microsoft/azure/operators/adls_list.py         |  3 +-
 airflow/providers/microsoft/azure/operators/adx.py |  3 +-
 .../microsoft/azure/operators/azure_batch.py       |  3 +-
 .../azure/operators/azure_container_instances.py   |  3 +-
 .../microsoft/azure/operators/azure_cosmos.py      |  3 +-
 .../microsoft/azure/operators/wasb_delete_blob.py  |  3 +-
 .../providers/microsoft/mssql/operators/mssql.py   |  4 +-
 .../providers/microsoft/winrm/operators/winrm.py   |  3 +-
 airflow/providers/mysql/operators/mysql.py         |  4 +-
 .../providers/opsgenie/operators/opsgenie_alert.py |  3 +-
 airflow/providers/oracle/operators/oracle.py       |  4 +-
 airflow/providers/papermill/operators/papermill.py |  4 +-
 airflow/providers/postgres/operators/postgres.py   |  4 +-
 airflow/providers/qubole/operators/qubole.py       |  2 +-
 airflow/providers/qubole/operators/qubole_check.py |  8 +-
 airflow/providers/redis/operators/redis_publish.py |  4 +-
 .../operators/tableau_refresh_workbook.py          |  3 +-
 .../segment/operators/segment_track_event.py       |  3 +-
 airflow/providers/sftp/operators/sftp.py           |  3 +-
 .../providers/singularity/operators/singularity.py |  3 +-
 airflow/providers/slack/operators/slack_webhook.py |  2 -
 airflow/providers/snowflake/operators/snowflake.py |  4 +-
 airflow/providers/sqlite/operators/sqlite.py       |  4 +-
 airflow/providers/ssh/operators/ssh.py             |  3 +-
 airflow/providers/vertica/operators/vertica.py     |  4 +-
 .../yandex/operators/yandexcloud_dataproc.py       | 18 ++---
 97 files changed, 392 insertions(+), 644 deletions(-)

diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py
index 3294588..0d3e677 100644
--- a/airflow/providers/amazon/aws/operators/athena.py
+++ b/airflow/providers/amazon/aws/operators/athena.py
@@ -59,10 +59,9 @@ class AWSAthenaOperator(BaseOperator):
         result_configuration=None,
         sleep_time=30,
         max_tries=None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.database = database
         self.output_location = output_location
diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/airflow/providers/amazon/aws/operators/cloud_formation.py
index f6eadae..128f3fc 100644
--- a/airflow/providers/amazon/aws/operators/cloud_formation.py
+++ b/airflow/providers/amazon/aws/operators/cloud_formation.py
@@ -49,8 +49,8 @@ class CloudFormationCreateStackOperator(BaseOperator):
             stack_name,
             params,
             aws_conn_id='aws_default',
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.stack_name = stack_name
         self.params = params
         self.aws_conn_id = aws_conn_id
@@ -87,8 +87,8 @@ class CloudFormationDeleteStackOperator(BaseOperator):
             stack_name,
             params=None,
             aws_conn_id='aws_default',
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.params = params or {}
         self.stack_name = stack_name
         self.params = params
diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py
index 1bc3b9a..b674559 100644
--- a/airflow/providers/amazon/aws/operators/datasync.py
+++ b/airflow/providers/amazon/aws/operators/datasync.py
@@ -121,10 +121,9 @@ class AWSDataSyncOperator(BaseOperator):
         update_task_kwargs=None,
         task_execution_kwargs=None,
         delete_task_after_execution=False,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         # Assignments
         self.aws_conn_id = aws_conn_id
diff --git a/airflow/providers/amazon/aws/operators/ec2_start_instance.py b/airflow/providers/amazon/aws/operators/ec2_start_instance.py
index b3ca97b..7aafb46 100644
--- a/airflow/providers/amazon/aws/operators/ec2_start_instance.py
+++ b/airflow/providers/amazon/aws/operators/ec2_start_instance.py
@@ -49,9 +49,8 @@ class EC2StartInstanceOperator(BaseOperator):
                  aws_conn_id: str = "aws_default",
                  region_name: Optional[str] = None,
                  check_interval: float = 15,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_id = instance_id
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
diff --git a/airflow/providers/amazon/aws/operators/ec2_stop_instance.py b/airflow/providers/amazon/aws/operators/ec2_stop_instance.py
index 1b05847..181e022 100644
--- a/airflow/providers/amazon/aws/operators/ec2_stop_instance.py
+++ b/airflow/providers/amazon/aws/operators/ec2_stop_instance.py
@@ -49,9 +49,8 @@ class EC2StopInstanceOperator(BaseOperator):
                  aws_conn_id: str = "aws_default",
                  region_name: Optional[str] = None,
                  check_interval: float = 15,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_id = instance_id
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
diff --git a/airflow/providers/amazon/aws/operators/emr_add_steps.py b/airflow/providers/amazon/aws/operators/emr_add_steps.py
index 144de96..776a1f6 100644
--- a/airflow/providers/amazon/aws/operators/emr_add_steps.py
+++ b/airflow/providers/amazon/aws/operators/emr_add_steps.py
@@ -56,12 +56,12 @@ class EmrAddStepsOperator(BaseOperator):
             cluster_states=None,
             aws_conn_id='aws_default',
             steps=None,
-            *args, **kwargs):
+            **kwargs):
         if kwargs.get('xcom_push') is not None:
             raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead")
         if not (job_flow_id is None) ^ (job_flow_name is None):
             raise AirflowException('Exactly one of job_flow_id or job_flow_name must be specified.')
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         steps = steps or []
         self.aws_conn_id = aws_conn_id
         self.job_flow_id = job_flow_id
diff --git a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
index 8ecee35..52cbc11 100644
--- a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
+++ b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
@@ -48,8 +48,8 @@ class EmrCreateJobFlowOperator(BaseOperator):
             emr_conn_id='emr_default',
             job_flow_overrides=None,
             region_name=None,
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.aws_conn_id = aws_conn_id
         self.emr_conn_id = emr_conn_id
         if job_flow_overrides is None:
diff --git a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
index bbe163d..8a2a630 100644
--- a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
+++ b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
@@ -43,10 +43,10 @@ class EmrModifyClusterOperator(BaseOperator):
             cluster_id: str,
             step_concurrency_level: int,
             aws_conn_id: str = 'aws_default',
-            *args, **kwargs):
+            **kwargs):
         if kwargs.get('xcom_push') is not None:
             raise AirflowException("'xcom_push' was deprecated, use 'do_xcom_push' instead")
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.aws_conn_id = aws_conn_id
         self.cluster_id = cluster_id
         self.step_concurrency_level = step_concurrency_level
diff --git a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
index 4599f73..08a4ab4 100644
--- a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
+++ b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
@@ -39,8 +39,8 @@ class EmrTerminateJobFlowOperator(BaseOperator):
             self,
             job_flow_id,
             aws_conn_id='aws_default',
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.job_flow_id = job_flow_id
         self.aws_conn_id = aws_conn_id
 
diff --git a/airflow/providers/amazon/aws/operators/glue.py b/airflow/providers/amazon/aws/operators/glue.py
index 67e8455..fc656d6 100644
--- a/airflow/providers/amazon/aws/operators/glue.py
+++ b/airflow/providers/amazon/aws/operators/glue.py
@@ -69,9 +69,9 @@ class AwsGlueJobOperator(BaseOperator):
                  region_name=None,
                  s3_bucket=None,
                  iam_role_name=None,
-                 *args, **kwargs
+                 **kwargs
                  ):  # pylint: disable=too-many-arguments
-        super(AwsGlueJobOperator, self).__init__(*args, **kwargs)
+        super(AwsGlueJobOperator, self).__init__(**kwargs)
         self.job_name = job_name
         self.job_desc = job_desc
         self.script_location = script_location
diff --git a/airflow/providers/amazon/aws/operators/s3_bucket.py b/airflow/providers/amazon/aws/operators/s3_bucket.py
index a740aba..a863cbd 100644
--- a/airflow/providers/amazon/aws/operators/s3_bucket.py
+++ b/airflow/providers/amazon/aws/operators/s3_bucket.py
@@ -43,9 +43,8 @@ class S3CreateBucketOperator(BaseOperator):
                  bucket_name,
                  aws_conn_id: Optional[str] = "aws_default",
                  region_name: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket_name = bucket_name
         self.region_name = region_name
         self.aws_conn_id = aws_conn_id
@@ -79,9 +78,8 @@ class S3DeleteBucketOperator(BaseOperator):
                  bucket_name,
                  force_delete: Optional[bool] = False,
                  aws_conn_id: Optional[str] = "aws_default",
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket_name = bucket_name
         self.force_delete = force_delete
         self.aws_conn_id = aws_conn_id
diff --git a/airflow/providers/amazon/aws/operators/s3_copy_object.py b/airflow/providers/amazon/aws/operators/s3_copy_object.py
index 5d67d4e..0c0cf9e 100644
--- a/airflow/providers/amazon/aws/operators/s3_copy_object.py
+++ b/airflow/providers/amazon/aws/operators/s3_copy_object.py
@@ -77,8 +77,8 @@ class S3CopyObjectOperator(BaseOperator):
             source_version_id=None,
             aws_conn_id='aws_default',
             verify=None,
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
 
         self.source_bucket_key = source_bucket_key
         self.dest_bucket_key = dest_bucket_key
diff --git a/airflow/providers/amazon/aws/operators/s3_delete_objects.py b/airflow/providers/amazon/aws/operators/s3_delete_objects.py
index 25047fe..d6d676d 100644
--- a/airflow/providers/amazon/aws/operators/s3_delete_objects.py
+++ b/airflow/providers/amazon/aws/operators/s3_delete_objects.py
@@ -69,12 +69,12 @@ class S3DeleteObjectsOperator(BaseOperator):
             prefix=None,
             aws_conn_id='aws_default',
             verify=None,
-            *args, **kwargs):
+            **kwargs):
 
         if not bool(keys) ^ bool(prefix):
             raise ValueError("Either keys or prefix should be set.")
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket = bucket
         self.keys = keys
         self.prefix = prefix
diff --git a/airflow/providers/amazon/aws/operators/s3_file_transform.py b/airflow/providers/amazon/aws/operators/s3_file_transform.py
index 4f0e626..527b98b 100644
--- a/airflow/providers/amazon/aws/operators/s3_file_transform.py
+++ b/airflow/providers/amazon/aws/operators/s3_file_transform.py
@@ -95,9 +95,9 @@ class S3FileTransformOperator(BaseOperator):
             dest_aws_conn_id: str = 'aws_default',
             dest_verify: Optional[Union[bool, str]] = None,
             replace: bool = False,
-            *args, **kwargs) -> None:
+            **kwargs) -> None:
         # pylint: disable=too-many-arguments
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.source_s3_key = source_s3_key
         self.source_aws_conn_id = source_aws_conn_id
         self.source_verify = source_verify
diff --git a/airflow/providers/amazon/aws/operators/s3_list.py b/airflow/providers/amazon/aws/operators/s3_list.py
index 2406a22..017b93a 100644
--- a/airflow/providers/amazon/aws/operators/s3_list.py
+++ b/airflow/providers/amazon/aws/operators/s3_list.py
@@ -75,9 +75,8 @@ class S3ListOperator(BaseOperator):
                  delimiter='',
                  aws_conn_id='aws_default',
                  verify=None,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket = bucket
         self.prefix = prefix
         self.delimiter = delimiter
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_base.py b/airflow/providers/amazon/aws/operators/sagemaker_base.py
index a4dd025..3e4c0e6 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_base.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_base.py
@@ -44,8 +44,8 @@ class SageMakerBaseOperator(BaseOperator):
     def __init__(self,
                  config,
                  aws_conn_id='aws_default',
-                 *args, **kwargs):
-        super().__init__(*args, **kwargs)
+                 **kwargs):
+        super().__init__(**kwargs)
 
         self.aws_conn_id = aws_conn_id
         self.config = config
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
index 9a0cd4d..709b332 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
@@ -77,9 +77,9 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
                  check_interval=30,
                  max_ingestion_time=None,
                  operation='create',
-                 *args, **kwargs):
+                 **kwargs):
         super().__init__(config=config,
-                         *args, **kwargs)
+                         **kwargs)
 
         self.config = config
         self.wait_for_completion = wait_for_completion
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
index 8a2a9eb..2e6d4ff 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
@@ -42,9 +42,9 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
     @apply_defaults
     def __init__(self,
                  config,
-                 *args, **kwargs):
+                 **kwargs):
         super().__init__(config=config,
-                         *args, **kwargs)
+                         **kwargs)
 
         self.config = config
 
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_model.py b/airflow/providers/amazon/aws/operators/sagemaker_model.py
index 19c5373..b733d1d 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_model.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_model.py
@@ -39,9 +39,9 @@ class SageMakerModelOperator(SageMakerBaseOperator):
     @apply_defaults
     def __init__(self,
                  config,
-                 *args, **kwargs):
+                 **kwargs):
         super().__init__(config=config,
-                         *args, **kwargs)
+                         **kwargs)
 
         self.config = config
 
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_transform.py b/airflow/providers/amazon/aws/operators/sagemaker_transform.py
index 799ed2b..668861f 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_transform.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_transform.py
@@ -67,9 +67,9 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
                  wait_for_completion=True,
                  check_interval=30,
                  max_ingestion_time=None,
-                 *args, **kwargs):
+                 **kwargs):
         super().__init__(config=config,
-                         *args, **kwargs)
+                         **kwargs)
         self.config = config
         self.wait_for_completion = wait_for_completion
         self.check_interval = check_interval
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
index 3dcc20c..d8e9a4b 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
@@ -60,9 +60,9 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
                  wait_for_completion=True,
                  check_interval=30,
                  max_ingestion_time=None,
-                 *args, **kwargs):
+                 **kwargs):
         super().__init__(config=config,
-                         *args, **kwargs)
+                         **kwargs)
         self.config = config
         self.wait_for_completion = wait_for_completion
         self.check_interval = check_interval
diff --git a/airflow/providers/amazon/aws/operators/sns.py b/airflow/providers/amazon/aws/operators/sns.py
index 6988b18..124416f 100644
--- a/airflow/providers/amazon/aws/operators/sns.py
+++ b/airflow/providers/amazon/aws/operators/sns.py
@@ -50,8 +50,8 @@ class SnsPublishOperator(BaseOperator):
             aws_conn_id='aws_default',
             subject=None,
             message_attributes=None,
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.target_arn = target_arn
         self.message = message
         self.subject = subject
diff --git a/airflow/providers/amazon/aws/operators/sqs.py b/airflow/providers/amazon/aws/operators/sqs.py
index 1d9b1fc..42251f1 100644
--- a/airflow/providers/amazon/aws/operators/sqs.py
+++ b/airflow/providers/amazon/aws/operators/sqs.py
@@ -48,9 +48,8 @@ class SQSPublishOperator(BaseOperator):
                  message_attributes=None,
                  delay_seconds=0,
                  aws_conn_id='aws_default',
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.sqs_queue = sqs_queue
         self.aws_conn_id = aws_conn_id
         self.message_content = message_content
diff --git a/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py b/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py
index 2ef531c..f978414 100644
--- a/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py
+++ b/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py
@@ -41,8 +41,8 @@ class StepFunctionGetExecutionOutputOperator(BaseOperator):
     ui_color = '#f9c915'
 
     @apply_defaults
-    def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, execution_arn: str, aws_conn_id='aws_default', region_name=None, **kwargs):
+        super().__init__(**kwargs)
         self.execution_arn = execution_arn
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
diff --git a/airflow/providers/amazon/aws/operators/step_function_start_execution.py b/airflow/providers/amazon/aws/operators/step_function_start_execution.py
index f5ea75c..f6d719c 100644
--- a/airflow/providers/amazon/aws/operators/step_function_start_execution.py
+++ b/airflow/providers/amazon/aws/operators/step_function_start_execution.py
@@ -51,8 +51,8 @@ class StepFunctionStartExecutionOperator(BaseOperator):
     def __init__(self, state_machine_arn: str, name: Optional[str] = None,
                  state_machine_input: Union[dict, str, None] = None,
                  aws_conn_id='aws_default', region_name=None,
-                 *args, **kwargs):
-        super().__init__(*args, **kwargs)
+                 **kwargs):
+        super().__init__(**kwargs)
         self.state_machine_arn = state_machine_arn
         self.name = name
         self.input = state_machine_input
diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
index 504980b..c9acd38 100644
--- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -47,8 +47,8 @@ class SparkKubernetesOperator(BaseOperator):
                  application_file: str,
                  namespace: Optional[str] = None,
                  kubernetes_conn_id: str = 'kubernetes_default',
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
         self.application_file = application_file
         self.namespace = namespace
         self.kubernetes_conn_id = kubernetes_conn_id
diff --git a/airflow/providers/dingding/operators/dingding.py b/airflow/providers/dingding/operators/dingding.py
index 40f9872..d71c7fe 100644
--- a/airflow/providers/dingding/operators/dingding.py
+++ b/airflow/providers/dingding/operators/dingding.py
@@ -52,9 +52,8 @@ class DingdingOperator(BaseOperator):
                  message=None,
                  at_mobiles=None,
                  at_all=False,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dingding_conn_id = dingding_conn_id
         self.message_type = message_type
         self.message = message
diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py
index 40d601c..ba36d7f 100644
--- a/airflow/providers/docker/operators/docker.py
+++ b/airflow/providers/docker/operators/docker.py
@@ -162,10 +162,9 @@ class DockerOperator(BaseOperator):
             shm_size: Optional[int] = None,
             tty: Optional[bool] = False,
             cap_add: Optional[Iterable[str]] = None,
-            *args,
             **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.api_version = api_version
         self.auto_remove = auto_remove
         self.command = command
diff --git a/airflow/providers/docker/operators/docker_swarm.py b/airflow/providers/docker/operators/docker_swarm.py
index ea83c5c..fb38df4 100644
--- a/airflow/providers/docker/operators/docker_swarm.py
+++ b/airflow/providers/docker/operators/docker_swarm.py
@@ -100,9 +100,8 @@ class DockerSwarmOperator(DockerOperator):
             self,
             image,
             enable_logging=True,
-            *args,
             **kwargs):
-        super().__init__(image=image, *args, **kwargs)
+        super().__init__(image=image, **kwargs)
 
         self.enable_logging = enable_logging
         self.service = None
diff --git a/airflow/providers/exasol/operators/exasol.py b/airflow/providers/exasol/operators/exasol.py
index 090f1b9..dfc7181 100644
--- a/airflow/providers/exasol/operators/exasol.py
+++ b/airflow/providers/exasol/operators/exasol.py
@@ -53,8 +53,8 @@ class ExasolOperator(BaseOperator):
             autocommit: bool = False,
             parameters: Optional[Mapping] = None,
             schema: Optional[str] = None,
-            *args, **kwargs):
-        super(ExasolOperator, self).__init__(*args, **kwargs)
+            **kwargs):
+        super(ExasolOperator, self).__init__(**kwargs)
         self.exasol_conn_id = exasol_conn_id
         self.sql = sql
         self.autocommit = autocommit
diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/operators/ads.py
index 950dc1f..1718fb5 100644
--- a/airflow/providers/google/ads/operators/ads.py
+++ b/airflow/providers/google/ads/operators/ads.py
@@ -69,10 +69,9 @@ class GoogleAdsListAccountsOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         google_ads_conn_id: str = "google_ads_default",
         gzip: bool = False,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket = bucket
         self.object_name = object_name
         self.gcp_conn_id = gcp_conn_id
diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py
index 49d4c4c..8ef6d2b 100644
--- a/airflow/providers/google/cloud/operators/automl.py
+++ b/airflow/providers/google/cloud/operators/automl.py
@@ -72,10 +72,9 @@ class AutoMLTrainModelOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model = model
         self.location = location
@@ -149,10 +148,9 @@ class AutoMLPredictOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model_id = model_id
         self.params = params  # type: ignore
@@ -243,10 +241,9 @@ class AutoMLBatchPredictOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model_id = model_id
         self.location = location
@@ -320,10 +317,9 @@ class AutoMLCreateDatasetOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.dataset = dataset
         self.location = location
@@ -397,10 +393,9 @@ class AutoMLImportDataOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.dataset_id = dataset_id
         self.input_config = input_config
@@ -491,10 +486,9 @@ class AutoMLTablesListColumnSpecsOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dataset_id = dataset_id
         self.table_spec_id = table_spec_id
         self.field_mask = field_mask
@@ -570,10 +564,9 @@ class AutoMLTablesUpdateDatasetOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.dataset = dataset
         self.update_mask = update_mask
@@ -638,10 +631,9 @@ class AutoMLGetModelOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model_id = model_id
         self.location = location
@@ -705,10 +697,9 @@ class AutoMLDeleteModelOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model_id = model_id
         self.location = location
@@ -783,10 +774,9 @@ class AutoMLDeployModelOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.model_id = model_id
         self.image_detection_metadata = image_detection_metadata
@@ -864,10 +854,9 @@ class AutoMLTablesListTableSpecsOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dataset_id = dataset_id
         self.filter_ = filter_
         self.page_size = page_size
@@ -933,10 +922,9 @@ class AutoMLListDatasetOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.project_id = project_id
         self.metadata = metadata
@@ -1005,10 +993,9 @@ class AutoMLDeleteDatasetOperator(BaseOperator):
         timeout: Optional[float] = None,
         retry: Optional[Retry] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.dataset_id = dataset_id
         self.location = location
diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py
index 62170e7..c729302 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/airflow/providers/google/cloud/operators/bigquery.py
@@ -147,10 +147,9 @@ class BigQueryCheckOperator(CheckOperator):
         bigquery_conn_id: Optional[str] = None,
         use_legacy_sql: bool = True,
         location: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(sql=sql, *args, **kwargs)
+        super().__init__(sql=sql, **kwargs)
         if bigquery_conn_id:
             warnings.warn(_DEPRECATION_MSG, DeprecationWarning, stacklevel=3)
             gcp_conn_id = bigquery_conn_id
@@ -205,14 +204,13 @@ class BigQueryValueCheckOperator(ValueCheckOperator):
         bigquery_conn_id: Optional[str] = None,
         use_legacy_sql: bool = True,
         location: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
         super().__init__(
             sql=sql,
             pass_value=pass_value,
             tolerance=tolerance,
-            *args, **kwargs
+            **kwargs
         )
 
         if bigquery_conn_id:
@@ -281,7 +279,6 @@ class BigQueryIntervalCheckOperator(IntervalCheckOperator):
         bigquery_conn_id: Optional[str] = None,
         use_legacy_sql: bool = True,
         location: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
         super().__init__(
@@ -289,7 +286,7 @@ class BigQueryIntervalCheckOperator(IntervalCheckOperator):
             metrics_thresholds=metrics_thresholds,
             date_filter_column=date_filter_column,
             days_back=days_back,
-            *args, **kwargs
+            **kwargs
         )
 
         if bigquery_conn_id:
@@ -376,10 +373,9 @@ class BigQueryGetDataOperator(BaseOperator):
         bigquery_conn_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
         location: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if bigquery_conn_id:
             warnings.warn(
@@ -554,9 +550,8 @@ class BigQueryExecuteQueryOperator(BaseOperator):
                  cluster_fields: Optional[List[str]] = None,
                  location: Optional[str] = None,
                  encryption_configuration: Optional[dict] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if bigquery_conn_id:
             warnings.warn(
@@ -804,9 +799,9 @@ class BigQueryCreateEmptyTableOperator(BaseOperator):
         encryption_configuration: Optional[Dict] = None,
         location: Optional[str] = None,
         cluster_fields: Optional[List[str]] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.project_id = project_id
         self.dataset_id = dataset_id
@@ -989,9 +984,9 @@ class BigQueryCreateExternalTableOperator(BaseOperator):
         labels: Optional[Dict] = None,
         encryption_configuration: Optional[Dict] = None,
         location: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         # GCS config
         self.bucket = bucket
@@ -1142,7 +1137,7 @@ class BigQueryDeleteDatasetOperator(BaseOperator):
         gcp_conn_id: str = 'google_cloud_default',
         bigquery_conn_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
         if bigquery_conn_id:
             warnings.warn(
@@ -1156,7 +1151,7 @@ class BigQueryDeleteDatasetOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.delegate_to = delegate_to
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         self.log.info('Dataset id: %s Project id: %s', self.dataset_id, self.project_id)
@@ -1222,7 +1217,7 @@ class BigQueryCreateEmptyDatasetOperator(BaseOperator):
                  gcp_conn_id: str = 'google_cloud_default',
                  bigquery_conn_id: Optional[str] = None,
                  delegate_to: Optional[str] = None,
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
 
         if bigquery_conn_id:
             warnings.warn(
@@ -1237,7 +1232,7 @@ class BigQueryCreateEmptyDatasetOperator(BaseOperator):
         self.dataset_reference = dataset_reference if dataset_reference else {}
         self.delegate_to = delegate_to
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         bq_hook = BigQueryHook(
@@ -1288,12 +1283,12 @@ class BigQueryGetDatasetOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.dataset_id = dataset_id
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
         self.delegate_to = delegate_to
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         bq_hook = BigQueryHook(gcp_conn_id=self.gcp_conn_id,
@@ -1340,14 +1335,14 @@ class BigQueryGetDatasetTablesOperator(BaseOperator):
         max_results: Optional[int] = None,
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
         self.dataset_id = dataset_id
         self.project_id = project_id
         self.max_results = max_results
         self.gcp_conn_id = gcp_conn_id
         self.delegate_to = delegate_to
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         bq_hook = BigQueryHook(
@@ -1397,7 +1392,7 @@ class BigQueryPatchDatasetOperator(BaseOperator):
         project_id: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
         delegate_to: Optional[str] = None,
-        *args, **kwargs,
+        **kwargs,
     ) -> None:
 
         warnings.warn(
@@ -1409,7 +1404,7 @@ class BigQueryPatchDatasetOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.dataset_resource = dataset_resource
         self.delegate_to = delegate_to
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         bq_hook = BigQueryHook(
@@ -1465,7 +1460,7 @@ class BigQueryUpdateDatasetOperator(BaseOperator):
         project_id: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
         self.dataset_id = dataset_id
         self.project_id = project_id
@@ -1473,7 +1468,7 @@ class BigQueryUpdateDatasetOperator(BaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.dataset_resource = dataset_resource
         self.delegate_to = delegate_to
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         bq_hook = BigQueryHook(
@@ -1530,10 +1525,9 @@ class BigQueryDeleteTableOperator(BaseOperator):
         delegate_to: Optional[str] = None,
         ignore_if_missing: bool = False,
         location: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if bigquery_conn_id:
             warnings.warn(
@@ -1603,10 +1597,9 @@ class BigQueryUpsertTableOperator(BaseOperator):
         bigquery_conn_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
         location: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if bigquery_conn_id:
             warnings.warn(
@@ -1675,10 +1668,9 @@ class BigQueryInsertJobOperator(BaseOperator):
         job_id: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.configuration = configuration
         self.location = location
         self.job_id = job_id
diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py
index 0c90450..2472ae2 100644
--- a/airflow/providers/google/cloud/operators/bigquery_dts.py
+++ b/airflow/providers/google/cloud/operators/bigquery_dts.py
@@ -74,10 +74,9 @@ class BigQueryCreateDataTransferOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id="google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.transfer_config = transfer_config
         self.authorization_code = authorization_code
         self.project_id = project_id
@@ -140,10 +139,9 @@ class BigQueryDeleteDataTransferConfigOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id="google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.transfer_config_id = transfer_config_id
         self.retry = retry
@@ -219,10 +217,9 @@ class BigQueryDataTransferServiceStartTransferRunsOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id="google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.transfer_config_id = transfer_config_id
         self.requested_time_range = requested_time_range
diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/airflow/providers/google/cloud/operators/bigtable.py
index a4ed5d9..09ab265 100644
--- a/airflow/providers/google/cloud/operators/bigtable.py
+++ b/airflow/providers/google/cloud/operators/bigtable.py
@@ -108,7 +108,7 @@ class BigtableCreateInstanceOperator(BaseOperator, BigtableValidationMixin):
                  cluster_storage_type: Optional[enums.StorageType] = None,
                  timeout: Optional[float] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.instance_id = instance_id
         self.main_cluster_id = main_cluster_id
@@ -123,7 +123,7 @@ class BigtableCreateInstanceOperator(BaseOperator, BigtableValidationMixin):
         self.timeout = timeout
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         hook = BigtableHook(gcp_conn_id=self.gcp_conn_id)
@@ -185,12 +185,12 @@ class BigtableDeleteInstanceOperator(BaseOperator, BigtableValidationMixin):
                  instance_id: str,
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.instance_id = instance_id
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         hook = BigtableHook(gcp_conn_id=self.gcp_conn_id)
@@ -248,7 +248,7 @@ class BigtableCreateTableOperator(BaseOperator, BigtableValidationMixin):
                  initial_split_keys: Optional[List] = None,
                  column_families: Optional[Dict[str, GarbageCollectionRule]] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.instance_id = instance_id
         self.table_id = table_id
@@ -256,7 +256,7 @@ class BigtableCreateTableOperator(BaseOperator, BigtableValidationMixin):
         self.column_families = column_families or {}
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _compare_column_families(self, hook, instance):
         table_column_families = hook.get_column_families_for_table(instance, self.table_id)
@@ -337,14 +337,14 @@ class BigtableDeleteTableOperator(BaseOperator, BigtableValidationMixin):
                  project_id: Optional[str] = None,
                  app_profile_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.instance_id = instance_id
         self.table_id = table_id
         self.app_profile_id = app_profile_id
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         hook = BigtableHook(gcp_conn_id=self.gcp_conn_id)
@@ -402,14 +402,14 @@ class BigtableUpdateClusterOperator(BaseOperator, BigtableValidationMixin):
                  nodes: int,
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.instance_id = instance_id
         self.cluster_id = cluster_id
         self.nodes = nodes
         self._validate_inputs()
         self.gcp_conn_id = gcp_conn_id
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         hook = BigtableHook(gcp_conn_id=self.gcp_conn_id)
diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/airflow/providers/google/cloud/operators/cloud_build.py
index fad1f06..d1140e2 100644
--- a/airflow/providers/google/cloud/operators/cloud_build.py
+++ b/airflow/providers/google/cloud/operators/cloud_build.py
@@ -189,8 +189,8 @@ class CloudBuildCreateBuildOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = "google_cloud_default",
                  api_version: str = "v1",
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
         self.body = body
         # Not template fields to keep original value
         self.body_raw = body
diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/airflow/providers/google/cloud/operators/cloud_memorystore.py
index e4614b4..248a769 100644
--- a/airflow/providers/google/cloud/operators/cloud_memorystore.py
+++ b/airflow/providers/google/cloud/operators/cloud_memorystore.py
@@ -92,10 +92,9 @@ class CloudMemorystoreCreateInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance_id = instance_id
         self.instance = instance
@@ -158,10 +157,9 @@ class CloudMemorystoreDeleteInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.project_id = project_id
@@ -237,10 +235,9 @@ class CloudMemorystoreExportInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.output_config = output_config
@@ -316,10 +313,9 @@ class CloudMemorystoreFailoverInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.data_protection_mode = data_protection_mode
@@ -380,10 +376,9 @@ class CloudMemorystoreGetInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.project_id = project_id
@@ -461,10 +456,9 @@ class CloudMemorystoreImportOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.input_config = input_config
@@ -529,10 +523,9 @@ class CloudMemorystoreListInstancesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.page_size = page_size
         self.project_id = project_id
@@ -623,10 +616,9 @@ class CloudMemorystoreUpdateInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.update_mask = update_mask
         self.instance = instance
         self.location = location
@@ -702,10 +694,9 @@ class CloudMemorystoreScaleInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.memory_size_gb = memory_size_gb
         self.location = location
         self.instance_id = instance_id
@@ -802,10 +793,9 @@ class CloudMemorystoreCreateInstanceAndImportOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance_id = instance_id
         self.instance = instance
@@ -896,10 +886,9 @@ class CloudMemorystoreExportAndDeleteInstanceOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.instance = instance
         self.output_config = output_config
diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
index 96bbc3b..65dfda1 100644
--- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py
@@ -202,10 +202,9 @@ class CloudDataTransferServiceCreateJobOperator(BaseOperator):
         aws_conn_id: str = 'aws_default',
         gcp_conn_id: str = 'google_cloud_default',
         api_version: str = 'v1',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.body = deepcopy(body)
         self.aws_conn_id = aws_conn_id
         self.gcp_conn_id = gcp_conn_id
@@ -262,10 +261,9 @@ class CloudDataTransferServiceUpdateJobOperator(BaseOperator):
         aws_conn_id: str = 'aws_default',
         gcp_conn_id: str = 'google_cloud_default',
         api_version: str = 'v1',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_name = job_name
         self.body = body
         self.gcp_conn_id = gcp_conn_id
@@ -318,10 +316,9 @@ class CloudDataTransferServiceDeleteJobOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
         project_id: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_name = job_name
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
@@ -365,10 +362,9 @@ class CloudDataTransferServiceGetOperationOperator(BaseOperator):
         operation_name: str,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.operation_name = operation_name
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
@@ -410,7 +406,6 @@ class CloudDataTransferServiceListOperationsOperator(BaseOperator):
                  request_filter: Optional[Dict] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  api_version: str = 'v1',
-                 *args,
                  **kwargs) -> None:
         # To preserve backward compatibility
         # TODO: remove one day
@@ -421,7 +416,7 @@ class CloudDataTransferServiceListOperationsOperator(BaseOperator):
             else:
                 TypeError("__init__() missing 1 required positional argument: 'request_filter'")
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.filter = request_filter
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
@@ -463,10 +458,9 @@ class CloudDataTransferServicePauseOperationOperator(BaseOperator):
         operation_name: str,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.operation_name = operation_name
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
@@ -506,14 +500,13 @@ class CloudDataTransferServiceResumeOperationOperator(BaseOperator):
         operation_name: str,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
-        *args,
         **kwargs
     ) -> None:
         self.operation_name = operation_name
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if not self.operation_name:
@@ -550,10 +543,9 @@ class CloudDataTransferServiceCancelOperationOperator(BaseOperator):
         operation_name: str,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.operation_name = operation_name
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
@@ -648,11 +640,10 @@ class CloudDataTransferServiceS3ToGCSOperator(BaseOperator):
         transfer_options: Optional[Dict] = None,
         wait: bool = True,
         timeout: Optional[float] = None,
-        *args,
         **kwargs
     ) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.s3_bucket = s3_bucket
         self.gcs_bucket = gcs_bucket
         self.project_id = project_id
@@ -790,11 +781,10 @@ class CloudDataTransferServiceGCSToGCSOperator(BaseOperator):
         transfer_options: Optional[Dict] = None,
         wait: bool = True,
         timeout: Optional[float] = None,
-        *args,
         **kwargs
     ) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.source_bucket = source_bucket
         self.destination_bucket = destination_bucket
         self.project_id = project_id
diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/airflow/providers/google/cloud/operators/datacatalog.py
index 5c3e064..94c8191 100644
--- a/airflow/providers/google/cloud/operators/datacatalog.py
+++ b/airflow/providers/google/cloud/operators/datacatalog.py
@@ -92,10 +92,9 @@ class CloudDataCatalogCreateEntryOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry_id = entry_id
@@ -195,10 +194,9 @@ class CloudDataCatalogCreateEntryGroupOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group_id = entry_group_id
         self.entry_group = entry_group
@@ -301,10 +299,9 @@ class CloudDataCatalogCreateTagOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry = entry
@@ -416,10 +413,9 @@ class CloudDataCatalogCreateTagTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template_id = tag_template_id
         self.tag_template = tag_template
@@ -521,10 +517,9 @@ class CloudDataCatalogCreateTagTemplateFieldOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template = tag_template
         self.tag_template_field_id = tag_template_field_id
@@ -617,10 +612,9 @@ class CloudDataCatalogDeleteEntryOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry = entry
@@ -688,10 +682,9 @@ class CloudDataCatalogDeleteEntryGroupOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.project_id = project_id
@@ -771,10 +764,9 @@ class CloudDataCatalogDeleteTagOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry = entry
@@ -856,10 +848,9 @@ class CloudDataCatalogDeleteTagTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template = tag_template
         self.force = force
@@ -941,10 +932,9 @@ class CloudDataCatalogDeleteTagTemplateFieldOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template = tag_template
         self.field = field
@@ -1024,10 +1014,9 @@ class CloudDataCatalogGetEntryOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry = entry
@@ -1106,10 +1095,9 @@ class CloudDataCatalogGetEntryGroupOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.read_mask = read_mask
@@ -1181,10 +1169,9 @@ class CloudDataCatalogGetTagTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template = tag_template
         self.project_id = project_id
@@ -1264,10 +1251,9 @@ class CloudDataCatalogListTagsOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.entry_group = entry_group
         self.entry = entry
@@ -1343,10 +1329,9 @@ class CloudDataCatalogLookupEntryOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.linked_resource = linked_resource
         self.sql_resource = sql_resource
         self.project_id = project_id
@@ -1425,10 +1410,9 @@ class CloudDataCatalogRenameTagTemplateFieldOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.tag_template = tag_template
         self.field = field
@@ -1532,10 +1516,9 @@ class CloudDataCatalogSearchCatalogOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.scope = scope
         self.query = query
         self.page_size = page_size
@@ -1626,10 +1609,9 @@ class CloudDataCatalogUpdateEntryOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.entry = entry
         self.update_mask = update_mask
         self.location = location
@@ -1725,10 +1707,9 @@ class CloudDataCatalogUpdateTagOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.tag = tag
         self.update_mask = update_mask
         self.location = location
@@ -1825,10 +1806,9 @@ class CloudDataCatalogUpdateTagTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.tag_template = tag_template
         self.update_mask = update_mask
         self.location = location
@@ -1931,10 +1911,9 @@ class CloudDataCatalogUpdateTagTemplateFieldOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.tag_template_field_name = tag_template_field_name
         self.location = location
         self.tag_template = tag_template
diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py
index 04bf297..9cf01ab 100644
--- a/airflow/providers/google/cloud/operators/dataflow.py
+++ b/airflow/providers/google/cloud/operators/dataflow.py
@@ -193,9 +193,8 @@ class DataflowCreateJavaJobOperator(BaseOperator):
             job_class: Optional[str] = None,
             check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun,
             multiple_jobs: Optional[bool] = None,
-            *args,
             **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         dataflow_default_options = dataflow_default_options or {}
         options = options or {}
@@ -376,9 +375,8 @@ class DataflowTemplatedJobStartOperator(BaseOperator):
             gcp_conn_id: str = 'google_cloud_default',
             delegate_to: Optional[str] = None,
             poll_sleep: int = 10,
-            *args,
             **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template = template
         self.job_name = job_name
         self.options = options or {}
@@ -508,10 +506,9 @@ class DataflowCreatePythonJobOperator(BaseOperator):
             gcp_conn_id: str = 'google_cloud_default',
             delegate_to: Optional[str] = None,
             poll_sleep: int = 10,
-            *args,
             **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.py_file = py_file
         self.job_name = job_name
diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py
index 093721d..3b4aed9 100644
--- a/airflow/providers/google/cloud/operators/datafusion.py
+++ b/airflow/providers/google/cloud/operators/datafusion.py
@@ -64,10 +64,9 @@ class CloudDataFusionRestartInstanceOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_name = instance_name
         self.location = location
         self.project_id = project_id
@@ -125,10 +124,9 @@ class CloudDataFusionDeleteInstanceOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_name = instance_name
         self.location = location
         self.project_id = project_id
@@ -190,10 +188,9 @@ class CloudDataFusionCreateInstanceOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_name = instance_name
         self.instance = instance
         self.location = location
@@ -282,10 +279,9 @@ class CloudDataFusionUpdateInstanceOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.update_mask = update_mask
         self.instance_name = instance_name
         self.instance = instance
@@ -347,10 +343,9 @@ class CloudDataFusionGetInstanceOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.instance_name = instance_name
         self.location = location
         self.project_id = project_id
@@ -417,10 +412,9 @@ class CloudDataFusionCreatePipelineOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.pipeline_name = pipeline_name
         self.pipeline = pipeline
         self.namespace = namespace
@@ -496,10 +490,9 @@ class CloudDataFusionDeletePipelineOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.pipeline_name = pipeline_name
         self.version_id = version_id
         self.namespace = namespace
@@ -576,10 +569,9 @@ class CloudDataFusionListPipelinesOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.artifact_version = artifact_version
         self.artifact_name = artifact_name
         self.namespace = namespace
@@ -664,10 +656,9 @@ class CloudDataFusionStartPipelineOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.pipeline_name = pipeline_name
         self.success_states = success_states
         self.runtime_args = runtime_args
@@ -752,10 +743,9 @@ class CloudDataFusionStopPipelineOperator(BaseOperator):
         api_version: str = "v1beta1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.pipeline_name = pipeline_name
         self.namespace = namespace
         self.instance_name = instance_name
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index 565c7e6..5969fee 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -191,7 +191,6 @@ class ClusterGenerator:
                  auto_delete_time: Optional[datetime] = None,
                  auto_delete_ttl: Optional[int] = None,
                  customer_managed_key: Optional[str] = None,
-                 *args,  # just in case
                  **kwargs
                  ) -> None:
 
@@ -465,7 +464,6 @@ class DataprocCreateClusterOperator(BaseOperator):
                  timeout: Optional[float] = None,
                  metadata: Optional[Sequence[Tuple[str, str]]] = None,
                  gcp_conn_id: str = "google_cloud_default",
-                 *args,
                  **kwargs) -> None:
         # TODO: remove one day
         if cluster is None:
@@ -491,7 +489,7 @@ class DataprocCreateClusterOperator(BaseOperator):
                 if arg in kwargs:
                     del kwargs[arg]
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.cluster = cluster
         self.cluster_name = cluster.get('cluster_name')
@@ -580,9 +578,8 @@ class DataprocScaleClusterOperator(BaseOperator):
                  num_preemptible_workers: int = 0,
                  graceful_decommission_timeout: Optional[str] = None,
                  gcp_conn_id: str = "google_cloud_default",
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.region = region
         self.cluster_name = cluster_name
@@ -708,10 +705,9 @@ class DataprocDeleteClusterOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.region = region
         self.cluster_name = cluster_name
@@ -793,9 +789,8 @@ class DataprocJobBaseOperator(BaseOperator):
                  labels: Optional[Dict] = None,
                  region: str = 'global',
                  job_error_states: Optional[Set[str]] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.gcp_conn_id = gcp_conn_id
         self.delegate_to = delegate_to
         self.labels = labels
@@ -920,7 +915,6 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
         query: Optional[str] = None,
         query_uri: Optional[str] = None,
         variables: Optional[Dict] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -932,7 +926,7 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -986,7 +980,6 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
         query: Optional[str] = None,
         query_uri: Optional[str] = None,
         variables: Optional[Dict] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -998,7 +991,7 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -1052,7 +1045,6 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
         query: Optional[str] = None,
         query_uri: Optional[str] = None,
         variables: Optional[Dict] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -1064,7 +1056,7 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -1127,7 +1119,6 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
         arguments: Optional[List] = None,
         archives: Optional[List] = None,
         files: Optional[List] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -1139,7 +1130,7 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.main_jar = main_jar
         self.main_class = main_class
         self.arguments = arguments
@@ -1200,7 +1191,6 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
         arguments: Optional[List] = None,
         archives: Optional[List] = None,
         files: Optional[List] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -1212,7 +1202,7 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.main_jar = main_jar
         self.main_class = main_class
         self.arguments = arguments
@@ -1300,7 +1290,6 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
         archives: Optional[List] = None,
         pyfiles: Optional[List] = None,
         files: Optional[List] = None,
-        *args,
         **kwargs
     ) -> None:
         # TODO: Remove one day
@@ -1312,7 +1301,7 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
             stacklevel=1
         )
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.main = main
         self.arguments = arguments
         self.archives = archives
@@ -1424,10 +1413,9 @@ class DataprocInstantiateWorkflowTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.template_id = template_id
         self.parameters = parameters
@@ -1513,10 +1501,9 @@ class DataprocInstantiateInlineWorkflowTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template = template
         self.project_id = project_id
         self.location = region
@@ -1585,10 +1572,9 @@ class DataprocSubmitJobOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.location = location
         self.job = job
@@ -1677,10 +1663,9 @@ class DataprocUpdateClusterOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.location = location
         self.cluster_name = cluster_name
diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py
index 4e8dcee..b99e884 100644
--- a/airflow/providers/google/cloud/operators/datastore.py
+++ b/airflow/providers/google/cloud/operators/datastore.py
@@ -73,9 +73,8 @@ class CloudDatastoreExportEntitiesOperator(BaseOperator):
                  polling_interval_in_seconds: int = 10,
                  overwrite_existing: bool = False,
                  project_id: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.datastore_conn_id = datastore_conn_id
         self.cloud_storage_conn_id = cloud_storage_conn_id
         self.delegate_to = delegate_to
@@ -157,9 +156,8 @@ class CloudDatastoreImportEntitiesOperator(BaseOperator):
                  delegate_to: Optional[str] = None,
                  polling_interval_in_seconds: float = 10,
                  project_id: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.datastore_conn_id = datastore_conn_id
         self.delegate_to = delegate_to
         self.bucket = bucket
diff --git a/airflow/providers/google/cloud/operators/dlp.py b/airflow/providers/google/cloud/operators/dlp.py
index 9617e8e..fac5c29 100644
--- a/airflow/providers/google/cloud/operators/dlp.py
+++ b/airflow/providers/google/cloud/operators/dlp.py
@@ -72,10 +72,9 @@ class CloudDLPCancelDLPJobOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dlp_job_id = dlp_job_id
         self.project_id = project_id
         self.retry = retry
@@ -143,10 +142,9 @@ class CloudDLPCreateDeidentifyTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.deidentify_template = deidentify_template
@@ -226,10 +224,9 @@ class CloudDLPCreateDLPJobOperator(BaseOperator):
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         wait_until_finished: bool = True,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.inspect_job = inspect_job
         self.risk_job = risk_job
@@ -313,10 +310,9 @@ class CloudDLPCreateInspectTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.inspect_template = inspect_template
@@ -389,10 +385,9 @@ class CloudDLPCreateJobTriggerOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.job_trigger = job_trigger
         self.trigger_id = trigger_id
@@ -471,10 +466,9 @@ class CloudDLPCreateStoredInfoTypeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.config = config
@@ -570,10 +564,9 @@ class CloudDLPDeidentifyContentOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.deidentify_config = deidentify_config
         self.inspect_config = inspect_config
@@ -639,10 +632,9 @@ class CloudDLPDeleteDeidentifyTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -701,10 +693,9 @@ class CloudDLPDeleteDLPJobOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dlp_job_id = dlp_job_id
         self.project_id = project_id
         self.retry = retry
@@ -764,10 +755,9 @@ class CloudDLPDeleteInspectTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -825,10 +815,9 @@ class CloudDLPDeleteJobTriggerOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_trigger_id = job_trigger_id
         self.project_id = project_id
         self.retry = retry
@@ -893,10 +882,9 @@ class CloudDLPDeleteStoredInfoTypeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.stored_info_type_id = stored_info_type_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -959,10 +947,9 @@ class CloudDLPGetDeidentifyTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -1019,10 +1006,9 @@ class CloudDLPGetDLPJobOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.dlp_job_id = dlp_job_id
         self.project_id = project_id
         self.retry = retry
@@ -1081,10 +1067,9 @@ class CloudDLPGetInspectTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -1141,10 +1126,9 @@ class CloudDLPGetDLPJobTriggerOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_trigger_id = job_trigger_id
         self.project_id = project_id
         self.retry = retry
@@ -1208,10 +1192,9 @@ class CloudDLPGetStoredInfoTypeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.stored_info_type_id = stored_info_type_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -1283,10 +1266,9 @@ class CloudDLPInspectContentOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.inspect_config = inspect_config
         self.item = item
@@ -1354,10 +1336,9 @@ class CloudDLPListDeidentifyTemplatesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.page_size = page_size
@@ -1427,10 +1408,9 @@ class CloudDLPListDLPJobsOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.results_filter = results_filter
         self.page_size = page_size
@@ -1491,10 +1471,9 @@ class CloudDLPListInfoTypesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.language_code = language_code
         self.results_filter = results_filter
         self.retry = retry
@@ -1558,10 +1537,9 @@ class CloudDLPListInspectTemplatesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.page_size = page_size
@@ -1628,10 +1606,9 @@ class CloudDLPListJobTriggersOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.page_size = page_size
         self.order_by = order_by
@@ -1699,10 +1676,9 @@ class CloudDLPListStoredInfoTypesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.organization_id = organization_id
         self.project_id = project_id
         self.page_size = page_size
@@ -1782,10 +1758,9 @@ class CloudDLPRedactImageOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.inspect_config = inspect_config
         self.image_redaction_configs = image_redaction_configs
@@ -1870,10 +1845,9 @@ class CloudDLPReidentifyContentOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.reidentify_config = reidentify_config
         self.inspect_config = inspect_config
@@ -1953,10 +1927,9 @@ class CloudDLPUpdateDeidentifyTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -2034,10 +2007,9 @@ class CloudDLPUpdateInspectTemplateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.template_id = template_id
         self.organization_id = organization_id
         self.project_id = project_id
@@ -2110,10 +2082,9 @@ class CloudDLPUpdateJobTriggerOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_trigger_id = job_trigger_id
         self.project_id = project_id
         self.job_trigger = job_trigger
@@ -2190,10 +2161,9 @@ class CloudDLPUpdateStoredInfoTypeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.stored_info_type_id = stored_info_type_id
         self.organization_id = organization_id
         self.project_id = project_id
diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py
index 152ba3f..ba97a32 100644
--- a/airflow/providers/google/cloud/operators/functions.py
+++ b/airflow/providers/google/cloud/operators/functions.py
@@ -130,7 +130,7 @@ class CloudFunctionDeployFunctionOperator(BaseOperator):
                  api_version: str = 'v1',
                  zip_path: Optional[str] = None,
                  validate_body: bool = True,
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.project_id = project_id
         self.location = location
         self.body = body
@@ -143,7 +143,7 @@ class CloudFunctionDeployFunctionOperator(BaseOperator):
             self._field_validator = GcpBodyFieldValidator(CLOUD_FUNCTION_VALIDATION,
                                                           api_version=api_version)
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if not self.location:
@@ -318,12 +318,12 @@ class CloudFunctionDeleteFunctionOperator(BaseOperator):
                  name: str,
                  gcp_conn_id: str = 'google_cloud_default',
                  api_version: str = 'v1',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.name = name
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if not self.name:
@@ -378,10 +378,9 @@ class CloudFunctionInvokeFunctionOperator(BaseOperator):
         project_id: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
         api_version: str = 'v1',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.function_id = function_id
         self.input_data = input_data
         self.location = location
diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py
index fa24ad4..b7b5d8d 100644
--- a/airflow/providers/google/cloud/operators/gcs.py
+++ b/airflow/providers/google/cloud/operators/gcs.py
@@ -110,9 +110,8 @@ class GCSCreateBucketOperator(BaseOperator):
                  gcp_conn_id: str = 'google_cloud_default',
                  google_cloud_storage_conn_id: Optional[str] = None,
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if google_cloud_storage_conn_id:
             warnings.warn(
@@ -195,9 +194,8 @@ class GCSListObjectsOperator(BaseOperator):
                  gcp_conn_id: str = 'google_cloud_default',
                  google_cloud_storage_conn_id: Optional[str] = None,
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if google_cloud_storage_conn_id:
             warnings.warn(
@@ -260,7 +258,7 @@ class GCSDeleteObjectsOperator(BaseOperator):
                  gcp_conn_id: str = 'google_cloud_default',
                  google_cloud_storage_conn_id: Optional[str] = None,
                  delegate_to: Optional[str] = None,
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
 
         if google_cloud_storage_conn_id:
             warnings.warn(
@@ -277,7 +275,7 @@ class GCSDeleteObjectsOperator(BaseOperator):
         if not objects and not prefix:
             raise ValueError("Either object or prefix should be set. Both are None")
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def execute(self, context):
         hook = GCSHook(
@@ -337,10 +335,9 @@ class GCSBucketCreateAclEntryOperator(BaseOperator):
         user_project: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
         google_cloud_storage_conn_id: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         if google_cloud_storage_conn_id:
             warnings.warn(
@@ -408,8 +405,8 @@ class GCSObjectCreateAclEntryOperator(BaseOperator):
                  user_project: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  google_cloud_storage_conn_id: Optional[str] = None,
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
 
         if google_cloud_storage_conn_id:
             warnings.warn(
@@ -473,10 +470,9 @@ class GCSFileTransformOperator(BaseOperator):
         destination_bucket: Optional[str] = None,
         destination_object: Optional[str] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.source_bucket = source_bucket
         self.source_object = source_object
         self.destination_bucket = destination_bucket or self.source_bucket
@@ -556,8 +552,8 @@ class GCSDeleteBucketOperator(BaseOperator):
                  bucket_name: str,
                  force: bool = True,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
 
         self.bucket_name = bucket_name
         self.force: bool = force
@@ -629,10 +625,9 @@ class GCSSynchronizeBucketsOperator(BaseOperator):
         allow_overwrite: bool = False,
         gcp_conn_id: str = 'google_cloud_default',
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.source_bucket = source_bucket
         self.destination_bucket = destination_bucket
         self.source_object = source_object
diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py
index f97593f..e2bd8ff 100644
--- a/airflow/providers/google/cloud/operators/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py
@@ -79,9 +79,8 @@ class GKEDeleteClusterOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  api_version: str = 'v2',
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
@@ -158,9 +157,8 @@ class GKECreateClusterOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  api_version: str = 'v2',
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
diff --git a/airflow/providers/google/cloud/operators/life_sciences.py b/airflow/providers/google/cloud/operators/life_sciences.py
index 9295673..5d6ed88 100644
--- a/airflow/providers/google/cloud/operators/life_sciences.py
+++ b/airflow/providers/google/cloud/operators/life_sciences.py
@@ -55,8 +55,8 @@ class LifeSciencesRunPipelineOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = "google_cloud_default",
                  api_version: str = "v2beta",
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
         self.body = body
         self.location = location
         self.project_id = project_id
diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py
index 623c14d..b8fbf62 100644
--- a/airflow/providers/google/cloud/operators/mlengine.py
+++ b/airflow/providers/google/cloud/operators/mlengine.py
@@ -182,9 +182,8 @@ class MLEngineStartBatchPredictionJobOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self._project_id = project_id
         self._job_id = job_id
@@ -323,9 +322,8 @@ class MLEngineManageModelOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         warnings.warn(
             "This operator is deprecated. Consider using operators for specific operations: "
@@ -385,9 +383,8 @@ class MLEngineCreateModelOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model = model
         self._gcp_conn_id = gcp_conn_id
@@ -433,9 +430,8 @@ class MLEngineGetModelOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._gcp_conn_id = gcp_conn_id
@@ -485,9 +481,8 @@ class MLEngineDeleteModelOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._delete_contents = delete_contents
@@ -572,9 +567,8 @@ class MLEngineManageVersionOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._version_name = version_name
@@ -666,10 +660,9 @@ class MLEngineCreateVersionOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._version = version
@@ -733,10 +726,9 @@ class MLEngineSetDefaultVersionOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._version_name = version_name
@@ -795,10 +787,9 @@ class MLEngineListVersionsOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._gcp_conn_id = gcp_conn_id
@@ -857,10 +848,9 @@ class MLEngineDeleteVersionOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._model_name = model_name
         self._version_name = version_name
@@ -989,9 +979,8 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
                  mode: str = 'PRODUCTION',
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._job_id = job_id
         self._package_uris = package_uris
@@ -1118,9 +1107,8 @@ class MLEngineTrainingCancelJobOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
                  delegate_to: Optional[str] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self._project_id = project_id
         self._job_id = job_id
         self._gcp_conn_id = gcp_conn_id
diff --git a/airflow/providers/google/cloud/operators/natural_language.py b/airflow/providers/google/cloud/operators/natural_language.py
index fae7b04..d86aaca 100644
--- a/airflow/providers/google/cloud/operators/natural_language.py
+++ b/airflow/providers/google/cloud/operators/natural_language.py
@@ -67,10 +67,9 @@ class CloudNaturalLanguageAnalyzeEntitiesOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.document = document
         self.encoding_type = encoding_type
         self.retry = retry
@@ -127,10 +126,9 @@ class CloudNaturalLanguageAnalyzeEntitySentimentOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.document = document
         self.encoding_type = encoding_type
         self.retry = retry
@@ -190,10 +188,9 @@ class CloudNaturalLanguageAnalyzeSentimentOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.document = document
         self.encoding_type = encoding_type
         self.retry = retry
@@ -245,10 +242,9 @@ class CloudNaturalLanguageClassifyTextOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.document = document
         self.retry = retry
         self.timeout = timeout
diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py
index 233d3fb..8b65428 100644
--- a/airflow/providers/google/cloud/operators/pubsub.py
+++ b/airflow/providers/google/cloud/operators/pubsub.py
@@ -122,7 +122,6 @@ class PubSubCreateTopicOperator(BaseOperator):
             timeout: Optional[float] = None,
             metadata: Optional[Sequence[Tuple[str, str]]] = None,
             project: Optional[str] = None,
-            *args,
             **kwargs) -> None:
 
         # To preserve backward compatibility
@@ -133,7 +132,7 @@ class PubSubCreateTopicOperator(BaseOperator):
                 "the project_id parameter.", DeprecationWarning, stacklevel=2)
             project_id = project
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.topic = topic
         self.fail_if_exists = fail_if_exists
@@ -297,7 +296,6 @@ class PubSubCreateSubscriptionOperator(BaseOperator):
             metadata: Optional[Sequence[Tuple[str, str]]] = None,
             topic_project: Optional[str] = None,
             subscription_project: Optional[str] = None,
-            *args,
             **kwargs) -> None:
 
         # To preserve backward compatibility
@@ -313,7 +311,7 @@ class PubSubCreateSubscriptionOperator(BaseOperator):
                 "the subscription_project parameter.", DeprecationWarning, stacklevel=2)
             subscription_project_id = subscription_project
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.topic = topic
         self.subscription = subscription
@@ -428,7 +426,6 @@ class PubSubDeleteTopicOperator(BaseOperator):
             timeout: Optional[float] = None,
             metadata: Optional[Sequence[Tuple[str, str]]] = None,
             project: Optional[str] = None,
-            *args,
             **kwargs) -> None:
 
         # To preserve backward compatibility
@@ -439,7 +436,7 @@ class PubSubDeleteTopicOperator(BaseOperator):
                 "the project_id parameter.", DeprecationWarning, stacklevel=2)
             project_id = project
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.topic = topic
         self.fail_if_not_exists = fail_if_not_exists
@@ -540,7 +537,6 @@ class PubSubDeleteSubscriptionOperator(BaseOperator):
             timeout: Optional[float] = None,
             metadata: Optional[Sequence[Tuple[str, str]]] = None,
             project: Optional[str] = None,
-            *args,
             **kwargs) -> None:
 
         # To preserve backward compatibility
@@ -551,7 +547,7 @@ class PubSubDeleteSubscriptionOperator(BaseOperator):
                 "the project_id parameter.", DeprecationWarning, stacklevel=2)
             project_id = project
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.subscription = subscription
         self.fail_if_not_exists = fail_if_not_exists
@@ -642,7 +638,6 @@ class PubSubPublishMessageOperator(BaseOperator):
             gcp_conn_id: str = 'google_cloud_default',
             delegate_to: Optional[str] = None,
             project: Optional[str] = None,
-            *args,
             **kwargs) -> None:
 
         # To preserve backward compatibility
@@ -653,7 +648,7 @@ class PubSubPublishMessageOperator(BaseOperator):
                 "the project_id parameter.", DeprecationWarning, stacklevel=2)
             project_id = project
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.project_id = project_id
         self.topic = topic
         self.messages = messages
@@ -728,10 +723,9 @@ class PubSubPullOperator(BaseOperator):
             messages_callback: Optional[Callable[[List[ReceivedMessage], Dict[str, Any]], Any]] = None,
             gcp_conn_id: str = 'google_cloud_default',
             delegate_to: Optional[str] = None,
-            *args,
             **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.gcp_conn_id = gcp_conn_id
         self.delegate_to = delegate_to
         self.project_id = project_id
diff --git a/airflow/providers/google/cloud/operators/spanner.py b/airflow/providers/google/cloud/operators/spanner.py
index 77d1232..2d66b46 100644
--- a/airflow/providers/google/cloud/operators/spanner.py
+++ b/airflow/providers/google/cloud/operators/spanner.py
@@ -67,7 +67,7 @@ class SpannerDeployInstanceOperator(BaseOperator):
                  display_name: str,
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.configuration_name = configuration_name
@@ -75,7 +75,7 @@ class SpannerDeployInstanceOperator(BaseOperator):
         self.display_name = display_name
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
@@ -125,12 +125,12 @@ class SpannerDeleteInstanceOperator(BaseOperator):
                  instance_id: str,
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
@@ -183,14 +183,14 @@ class SpannerQueryDatabaseInstanceOperator(BaseOperator):
                  query: Union[str, List[str]],
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.database_id = database_id
         self.query = query
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
@@ -266,14 +266,14 @@ class SpannerDeployDatabaseInstanceOperator(BaseOperator):
                  ddl_statements: List[str],
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.database_id = database_id
         self.ddl_statements = ddl_statements
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
@@ -341,7 +341,7 @@ class SpannerUpdateDatabaseInstanceOperator(BaseOperator):
                  project_id: Optional[str] = None,
                  operation_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.database_id = database_id
@@ -349,7 +349,7 @@ class SpannerUpdateDatabaseInstanceOperator(BaseOperator):
         self.operation_id = operation_id
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
@@ -411,13 +411,13 @@ class SpannerDeleteDatabaseInstanceOperator(BaseOperator):
                  database_id: str,
                  project_id: Optional[str] = None,
                  gcp_conn_id: str = 'google_cloud_default',
-                 *args, **kwargs) -> None:
+                 **kwargs) -> None:
         self.instance_id = instance_id
         self.project_id = project_id
         self.database_id = database_id
         self.gcp_conn_id = gcp_conn_id
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.project_id == '':
diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/airflow/providers/google/cloud/operators/speech_to_text.py
index 7286c7a..6ef76b3 100644
--- a/airflow/providers/google/cloud/operators/speech_to_text.py
+++ b/airflow/providers/google/cloud/operators/speech_to_text.py
@@ -71,7 +71,6 @@ class CloudSpeechToTextRecognizeSpeechOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        *args,
         **kwargs
     ) -> None:
         self.audio = audio
@@ -81,7 +80,7 @@ class CloudSpeechToTextRecognizeSpeechOperator(BaseOperator):
         self.retry = retry
         self.timeout = timeout
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         if self.audio == "":
diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/airflow/providers/google/cloud/operators/stackdriver.py
index 3881acc..c4eda62 100644
--- a/airflow/providers/google/cloud/operators/stackdriver.py
+++ b/airflow/providers/google/cloud/operators/stackdriver.py
@@ -93,9 +93,9 @@ class StackdriverListAlertPoliciesOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.format_ = format_
         self.filter_ = filter_
         self.order_by = order_by
@@ -171,9 +171,9 @@ class StackdriverEnableAlertPoliciesOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.gcp_conn_id = gcp_conn_id
         self.project_id = project_id
         self.delegate_to = delegate_to
@@ -243,9 +243,9 @@ class StackdriverDisableAlertPoliciesOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.gcp_conn_id = gcp_conn_id
         self.project_id = project_id
         self.delegate_to = delegate_to
@@ -317,9 +317,9 @@ class StackdriverUpsertAlertOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.alerts = alerts
         self.retry = retry
         self.timeout = timeout
@@ -387,9 +387,9 @@ class StackdriverDeleteAlertOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.name = name
         self.retry = retry
         self.timeout = timeout
@@ -480,9 +480,9 @@ class StackdriverListNotificationChannelsOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.format_ = format_
         self.filter_ = filter_
         self.order_by = order_by
@@ -561,9 +561,9 @@ class StackdriverEnableNotificationChannelsOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.filter_ = filter_
         self.retry = retry
         self.timeout = timeout
@@ -634,9 +634,9 @@ class StackdriverDisableNotificationChannelsOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.filter_ = filter_
         self.retry = retry
         self.timeout = timeout
@@ -709,9 +709,9 @@ class StackdriverUpsertNotificationChannelOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.channels = channels
         self.retry = retry
         self.timeout = timeout
@@ -780,9 +780,9 @@ class StackdriverDeleteNotificationChannelOperator(BaseOperator):
         gcp_conn_id: Optional[str] = 'google_cloud_default',
         project_id: Optional[str] = None,
         delegate_to: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.name = name
         self.retry = retry
         self.timeout = timeout
diff --git a/airflow/providers/google/cloud/operators/tasks.py b/airflow/providers/google/cloud/operators/tasks.py
index 92c2da3..e2f91c6 100644
--- a/airflow/providers/google/cloud/operators/tasks.py
+++ b/airflow/providers/google/cloud/operators/tasks.py
@@ -85,10 +85,9 @@ class CloudTasksQueueCreateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.task_queue = task_queue
         self.project_id = project_id
@@ -179,10 +178,9 @@ class CloudTasksQueueUpdateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.task_queue = task_queue
         self.project_id = project_id
         self.location = location
@@ -245,10 +243,9 @@ class CloudTasksQueueGetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -311,10 +308,9 @@ class CloudTasksQueuesListOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.project_id = project_id
         self.results_filter = results_filter
@@ -374,10 +370,9 @@ class CloudTasksQueueDeleteOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -435,10 +430,9 @@ class CloudTasksQueuePurgeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -497,10 +491,9 @@ class CloudTasksQueuePauseOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -559,10 +552,9 @@ class CloudTasksQueueResumeOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -640,10 +632,9 @@ class CloudTasksTaskCreateOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.task = task
@@ -721,10 +712,9 @@ class CloudTasksTaskGetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.task_name = task_name
@@ -795,10 +785,9 @@ class CloudTasksTasksListOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.project_id = project_id
@@ -869,10 +858,9 @@ class CloudTasksTaskDeleteOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.task_name = task_name
@@ -945,10 +933,9 @@ class CloudTasksTaskRunOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.queue_name = queue_name
         self.task_name = task_name
diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/airflow/providers/google/cloud/operators/text_to_speech.py
index d652d3d..9641a23 100644
--- a/airflow/providers/google/cloud/operators/text_to_speech.py
+++ b/airflow/providers/google/cloud/operators/text_to_speech.py
@@ -91,7 +91,6 @@ class CloudTextToSpeechSynthesizeOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
-        *args,
         **kwargs
     ) -> None:
         self.input_data = input_data
@@ -104,7 +103,7 @@ class CloudTextToSpeechSynthesizeOperator(BaseOperator):
         self.retry = retry
         self.timeout = timeout
         self._validate_inputs()
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
     def _validate_inputs(self):
         for parameter in [
diff --git a/airflow/providers/google/cloud/operators/translate.py b/airflow/providers/google/cloud/operators/translate.py
index 3d2e2d3..cdfe52b 100644
--- a/airflow/providers/google/cloud/operators/translate.py
+++ b/airflow/providers/google/cloud/operators/translate.py
@@ -86,10 +86,9 @@ class CloudTranslateTextOperator(BaseOperator):
         source_language: Optional[str],
         model: str,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.values = values
         self.target_language = target_language
         self.format_ = format_
diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/airflow/providers/google/cloud/operators/translate_speech.py
index 6c5781d..33c3c60 100644
--- a/airflow/providers/google/cloud/operators/translate_speech.py
+++ b/airflow/providers/google/cloud/operators/translate_speech.py
@@ -112,10 +112,9 @@ class CloudTranslateSpeechOperator(BaseOperator):
         model: str,
         project_id: Optional[str] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.audio = audio
         self.config = config
         self.target_language = target_language
diff --git a/airflow/providers/google/cloud/operators/video_intelligence.py b/airflow/providers/google/cloud/operators/video_intelligence.py
index c181f25..30ede45 100644
--- a/airflow/providers/google/cloud/operators/video_intelligence.py
+++ b/airflow/providers/google/cloud/operators/video_intelligence.py
@@ -78,10 +78,9 @@ class CloudVideoIntelligenceDetectVideoLabelsOperator(BaseOperator):
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.input_uri = input_uri
         self.input_content = input_content
         self.output_uri = output_uri
@@ -157,10 +156,9 @@ class CloudVideoIntelligenceDetectVideoExplicitContentOperator(BaseOperator):
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.input_uri = input_uri
         self.output_uri = output_uri
         self.input_content = input_content
@@ -236,10 +234,9 @@ class CloudVideoIntelligenceDetectVideoShotsOperator(BaseOperator):
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.input_uri = input_uri
         self.output_uri = output_uri
         self.input_content = input_content
diff --git a/airflow/providers/google/cloud/operators/vision.py b/airflow/providers/google/cloud/operators/vision.py
index 514db58..95d5fc6 100644
--- a/airflow/providers/google/cloud/operators/vision.py
+++ b/airflow/providers/google/cloud/operators/vision.py
@@ -84,10 +84,9 @@ class CloudVisionCreateProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.project_id = project_id
         self.product_set = product_set
@@ -159,10 +158,9 @@ class CloudVisionGetProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.project_id = project_id
         self.product_set_id = product_set_id
@@ -246,10 +244,9 @@ class CloudVisionUpdateProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.product_set = product_set
         self.update_mask = update_mask
         self.location = location
@@ -318,10 +315,9 @@ class CloudVisionDeleteProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.project_id = project_id
         self.product_set_id = product_set_id
@@ -397,10 +393,9 @@ class CloudVisionCreateProductOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.product = product
         self.project_id = project_id
@@ -475,10 +470,9 @@ class CloudVisionGetProductOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.product_id = product_id
         self.project_id = project_id
@@ -573,10 +567,9 @@ class CloudVisionUpdateProductOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.product = product
         self.location = location
         self.product_id = product_id
@@ -650,10 +643,9 @@ class CloudVisionDeleteProductOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.product_id = product_id
         self.project_id = project_id
@@ -708,10 +700,9 @@ class CloudVisionImageAnnotateOperator(BaseOperator):
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.request = request
         self.retry = retry
         self.timeout = timeout
@@ -793,10 +784,9 @@ class CloudVisionCreateReferenceImageOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.product_id = product_id
         self.reference_image = reference_image
@@ -882,10 +872,9 @@ class CloudVisionDeleteReferenceImageOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = 'google_cloud_default',
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.location = location
         self.product_id = product_id
         self.reference_image_id = reference_image_id
@@ -959,10 +948,9 @@ class CloudVisionAddProductToProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.product_set_id = product_set_id
         self.product_id = product_id
         self.location = location
@@ -1030,10 +1018,9 @@ class CloudVisionRemoveProductFromProductSetOperator(BaseOperator):
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.product_set_id = product_set_id
         self.product_id = product_id
         self.location = location
@@ -1098,10 +1085,9 @@ class CloudVisionDetectTextOperator(BaseOperator):
         web_detection_params: Optional[Dict] = None,
         additional_properties: Optional[Dict] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.image = image
         self.max_results = max_results
         self.retry = retry
@@ -1167,10 +1153,9 @@ class CloudVisionTextDetectOperator(BaseOperator):
         web_detection_params: Optional[Dict] = None,
         additional_properties: Optional[Dict] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.image = image
         self.max_results = max_results
         self.retry = retry
@@ -1227,10 +1212,9 @@ class CloudVisionDetectImageLabelsOperator(BaseOperator):
         timeout: Optional[float] = None,
         additional_properties: Optional[Dict] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.image = image
         self.max_results = max_results
         self.retry = retry
@@ -1283,10 +1267,9 @@ class CloudVisionDetectImageSafeSearchOperator(BaseOperator):
         timeout: Optional[float] = None,
         additional_properties: Optional[Dict] = None,
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.image = image
         self.max_results = max_results
         self.retry = retry
diff --git a/airflow/providers/google/firebase/operators/firestore.py b/airflow/providers/google/firebase/operators/firestore.py
index 0795cd9..ecbcb0a 100644
--- a/airflow/providers/google/firebase/operators/firestore.py
+++ b/airflow/providers/google/firebase/operators/firestore.py
@@ -57,10 +57,9 @@ class CloudFirestoreExportDatabaseOperator(BaseOperator):
         project_id: Optional[str] = None,
         gcp_conn_id: str = "google_cloud_default",
         api_version: str = "v1",
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.database_id = database_id
         self.body = body
         self.project_id = project_id
diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py
index 9564596..e153b02 100644
--- a/airflow/providers/google/marketing_platform/operators/analytics.py
+++ b/airflow/providers/google/marketing_platform/operators/analytics.py
@@ -58,10 +58,9 @@ class GoogleAnalyticsListAccountsOperator(BaseOperator):
         self,
         api_version: str = "v3",
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
@@ -110,10 +109,9 @@ class GoogleAnalyticsGetAdsLinkOperator(BaseOperator):
         web_property_id: str,
         api_version: str = "v3",
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.account_id = account_id
         self.web_property_ad_words_link_id = web_property_ad_words_link_id
@@ -165,10 +163,9 @@ class GoogleAnalyticsRetrieveAdsLinksListOperator(BaseOperator):
         web_property_id: str,
         api_version: str = "v3",
         gcp_conn_id: str = "google_cloud_default",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.account_id = account_id
         self.web_property_id = web_property_id
@@ -226,10 +223,9 @@ class GoogleAnalyticsDataImportUploadOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
         api_version: str = "v3",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.storage_bucket = storage_bucket
         self.storage_name_object = storage_name_object
         self.account_id = account_id
@@ -298,10 +294,9 @@ class GoogleAnalyticsDeletePreviousDataUploadsOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
         api_version: str = "v3",
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.account_id = account_id
         self.web_property_id = web_property_id
@@ -364,11 +359,10 @@ class GoogleAnalyticsModifyFileHeadersDataImportOperator(BaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
         custom_dimension_header_mapping: Optional[Dict[str, str]] = None,
-        *args,
         **kwargs
     ):
         super(GoogleAnalyticsModifyFileHeadersDataImportOperator, self).__init__(
-            *args, **kwargs
+            **kwargs
         )
         self.storage_bucket = storage_bucket
         self.storage_name_object = storage_name_object
diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/airflow/providers/google/marketing_platform/operators/campaign_manager.py
index 1c8760e..817bdb5 100644
--- a/airflow/providers/google/marketing_platform/operators/campaign_manager.py
+++ b/airflow/providers/google/marketing_platform/operators/campaign_manager.py
@@ -77,10 +77,9 @@ class GoogleCampaignManagerDeleteReportOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         if not (report_name or report_id):
             raise AirflowException("Please provide `report_name` or `report_id`.")
         if report_name and report_id:
@@ -175,10 +174,9 @@ class GoogleCampaignManagerDownloadReportOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.profile_id = profile_id
         self.report_id = report_id
         self.file_id = file_id
@@ -289,10 +287,9 @@ class GoogleCampaignManagerInsertReportOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.profile_id = profile_id
         self.report = report
         self.api_version = api_version
@@ -366,10 +363,9 @@ class GoogleCampaignManagerRunReportOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.profile_id = profile_id
         self.report_id = report_id
         self.synchronous = synchronous
@@ -452,10 +448,9 @@ class GoogleCampaignManagerBatchInsertConversionsOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.profile_id = profile_id
         self.conversions = conversions
         self.encryption_entity_type = encryption_entity_type
@@ -540,10 +535,9 @@ class GoogleCampaignManagerBatchUpdateConversionsOperator(BaseOperator):
         api_version: str = "v3.3",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.profile_id = profile_id
         self.conversions = conversions
         self.encryption_entity_type = encryption_entity_type
diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/airflow/providers/google/marketing_platform/operators/display_video.py
index 8f5f5b4..2b3f69c 100644
--- a/airflow/providers/google/marketing_platform/operators/display_video.py
+++ b/airflow/providers/google/marketing_platform/operators/display_video.py
@@ -67,10 +67,9 @@ class GoogleDisplayVideo360CreateReportOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.body = body
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
@@ -131,10 +130,9 @@ class GoogleDisplayVideo360DeleteReportOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.report_id = report_id
         self.report_name = report_name
         self.api_version = api_version
@@ -215,10 +213,9 @@ class GoogleDisplayVideo360DownloadReportOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.report_id = report_id
         self.chunk_size = chunk_size
         self.gzip = gzip
@@ -318,10 +315,9 @@ class GoogleDisplayVideo360RunReportOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.report_id = report_id
         self.params = params
         self.api_version = api_version
@@ -372,10 +368,9 @@ class GoogleDisplayVideo360DownloadLineItemsOperator(BaseOperator):
         api_version: str = "v1.1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.request_body = request_body
         self.object_name = object_name
         self.bucket_name = bucket_name
@@ -446,10 +441,9 @@ class GoogleDisplayVideo360UploadLineItemsOperator(BaseOperator):
         api_version: str = "v1.1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.bucket_name = bucket_name
         self.object_name = object_name
         self.api_version = api_version
@@ -517,10 +511,9 @@ class GoogleDisplayVideo360CreateSDFDownloadTaskOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.body_request = body_request
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
@@ -584,10 +577,9 @@ class GoogleDisplayVideo360SDFtoGCSOperator(BaseOperator):
         api_version: str = "v1",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.operation_name = operation_name
         self.bucket_name = bucket_name
         self.object_name = object_name
diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/airflow/providers/google/marketing_platform/operators/search_ads.py
index 4f2200f..cd2e94a 100644
--- a/airflow/providers/google/marketing_platform/operators/search_ads.py
+++ b/airflow/providers/google/marketing_platform/operators/search_ads.py
@@ -62,10 +62,9 @@ class GoogleSearchAdsInsertReportOperator(BaseOperator):
         api_version: str = "v2",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.report = report
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
@@ -134,10 +133,9 @@ class GoogleSearchAdsDownloadReportOperator(BaseOperator):
         api_version: str = "v2",
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.report_id = report_id
         self.api_version = api_version
         self.gcp_conn_id = gcp_conn_id
diff --git a/airflow/providers/google/suite/operators/sheets.py b/airflow/providers/google/suite/operators/sheets.py
index 1dfe3da..d39e1ec 100644
--- a/airflow/providers/google/suite/operators/sheets.py
+++ b/airflow/providers/google/suite/operators/sheets.py
@@ -45,10 +45,9 @@ class GoogleSheetsCreateSpreadsheetOperator(BaseOperator):
         spreadsheet: Dict[str, Any],
         gcp_conn_id: str = "google_cloud_default",
         delegate_to: Optional[str] = None,
-        *args,
         **kwargs,
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.gcp_conn_id = gcp_conn_id
         self.spreadsheet = spreadsheet
         self.delegate_to = delegate_to
diff --git a/airflow/providers/grpc/operators/grpc.py b/airflow/providers/grpc/operators/grpc.py
index 107efbc..5322e33 100644
--- a/airflow/providers/grpc/operators/grpc.py
+++ b/airflow/providers/grpc/operators/grpc.py
@@ -63,8 +63,8 @@ class GrpcOperator(BaseOperator):
                  streaming: bool = False,
                  response_callback: Optional[Callable] = None,
                  log_response: bool = False,
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
         self.stub_class = stub_class
         self.call_func = call_func
         self.grpc_conn_id = grpc_conn_id
diff --git a/airflow/providers/http/operators/http.py b/airflow/providers/http/operators/http.py
index 328ce27..909fb8c 100644
--- a/airflow/providers/http/operators/http.py
+++ b/airflow/providers/http/operators/http.py
@@ -74,8 +74,8 @@ class SimpleHttpOperator(BaseOperator):
                  extra_options: Optional[Dict[str, Any]] = None,
                  http_conn_id: str = 'http_default',
                  log_response: bool = False,
-                 *args: Any, **kwargs: Any) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs: Any) -> None:
+        super().__init__(**kwargs)
         self.http_conn_id = http_conn_id
         self.method = method
         self.endpoint = endpoint
diff --git a/airflow/providers/jdbc/operators/jdbc.py b/airflow/providers/jdbc/operators/jdbc.py
index b973f3f..881dad3 100644
--- a/airflow/providers/jdbc/operators/jdbc.py
+++ b/airflow/providers/jdbc/operators/jdbc.py
@@ -51,8 +51,8 @@ class JdbcOperator(BaseOperator):
                  jdbc_conn_id: str = 'jdbc_default',
                  autocommit: bool = False,
                  parameters: Optional[Union[Mapping, Iterable]] = None,
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
         self.parameters = parameters
         self.sql = sql
         self.jdbc_conn_id = jdbc_conn_id
diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
index 434279c..6db7688 100644
--- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py
+++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -99,9 +99,8 @@ class JenkinsJobTriggerOperator(BaseOperator):
                  parameters="",
                  sleep_time=10,
                  max_try_before_job_appears=10,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.job_name = job_name
         self.parameters = parameters
         if sleep_time < 1:
diff --git a/airflow/providers/jira/operators/jira.py b/airflow/providers/jira/operators/jira.py
index 352fe1a..2925014 100644
--- a/airflow/providers/jira/operators/jira.py
+++ b/airflow/providers/jira/operators/jira.py
@@ -51,9 +51,8 @@ class JiraOperator(BaseOperator):
                  jira_method_args: Optional[dict] = None,
                  result_processor: Optional[Callable] = None,
                  get_jira_resource_method: Optional[Callable] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.jira_conn_id = jira_conn_id
         self.method_name = jira_method
         self.jira_method_args = jira_method_args
diff --git a/airflow/providers/microsoft/azure/operators/adls_list.py b/airflow/providers/microsoft/azure/operators/adls_list.py
index 3c77459..fa32ec5 100644
--- a/airflow/providers/microsoft/azure/operators/adls_list.py
+++ b/airflow/providers/microsoft/azure/operators/adls_list.py
@@ -53,9 +53,8 @@ class AzureDataLakeStorageListOperator(BaseOperator):
     def __init__(self,
                  path: str,
                  azure_data_lake_conn_id: str = 'azure_data_lake_default',
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.path = path
         self.azure_data_lake_conn_id = azure_data_lake_conn_id
 
diff --git a/airflow/providers/microsoft/azure/operators/adx.py b/airflow/providers/microsoft/azure/operators/adx.py
index ebd1e95..ee6f557 100644
--- a/airflow/providers/microsoft/azure/operators/adx.py
+++ b/airflow/providers/microsoft/azure/operators/adx.py
@@ -53,9 +53,8 @@ class AzureDataExplorerQueryOperator(BaseOperator):
             database: str,
             options: Optional[Dict] = None,
             azure_data_explorer_conn_id: str = 'azure_data_explorer_default',
-            *args,
             **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.database = database
         self.options = options
diff --git a/airflow/providers/microsoft/azure/operators/azure_batch.py b/airflow/providers/microsoft/azure/operators/azure_batch.py
index ff3f35a..7aefe07 100644
--- a/airflow/providers/microsoft/azure/operators/azure_batch.py
+++ b/airflow/providers/microsoft/azure/operators/azure_batch.py
@@ -178,10 +178,9 @@ class AzureBatchOperator(BaseOperator):
                  timeout: int = 25,
                  should_delete_job: bool = False,
                  should_delete_pool: bool = False,
-                 *args,
                  **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.batch_pool_id = batch_pool_id
         self.batch_pool_vm_size = batch_pool_vm_size
         self.batch_job_id = batch_job_id
diff --git a/airflow/providers/microsoft/azure/operators/azure_container_instances.py b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
index 06f2a34..0c3c217 100644
--- a/airflow/providers/microsoft/azure/operators/azure_container_instances.py
+++ b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
@@ -139,9 +139,8 @@ class AzureContainerInstancesOperator(BaseOperator):
                  remove_on_error: bool = True,
                  fail_if_exists: bool = True,
                  tags: Optional[Dict[str, str]] = None,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.ci_conn_id = ci_conn_id
         self.resource_group = resource_group
diff --git a/airflow/providers/microsoft/azure/operators/azure_cosmos.py b/airflow/providers/microsoft/azure/operators/azure_cosmos.py
index 10bfc24..98fa696 100644
--- a/airflow/providers/microsoft/azure/operators/azure_cosmos.py
+++ b/airflow/providers/microsoft/azure/operators/azure_cosmos.py
@@ -45,9 +45,8 @@ class AzureCosmosInsertDocumentOperator(BaseOperator):
                  collection_name: str,
                  document: dict,
                  azure_cosmos_conn_id: str = 'azure_cosmos_default',
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.database_name = database_name
         self.collection_name = collection_name
         self.document = document
diff --git a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
index b669e63..c17d18a 100644
--- a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
+++ b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
@@ -52,9 +52,8 @@ class WasbDeleteBlobOperator(BaseOperator):
                  check_options: Any = None,
                  is_prefix: bool = False,
                  ignore_if_missing: bool = False,
-                 *args,
                  **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         if check_options is None:
             check_options = {}
         self.wasb_conn_id = wasb_conn_id
diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py b/airflow/providers/microsoft/mssql/operators/mssql.py
index 88f6146..9a9fa15 100644
--- a/airflow/providers/microsoft/mssql/operators/mssql.py
+++ b/airflow/providers/microsoft/mssql/operators/mssql.py
@@ -59,9 +59,9 @@ class MsSqlOperator(BaseOperator):
         parameters: Optional[Union[Mapping, Iterable]] = None,
         autocommit: bool = False,
         database: Optional[str] = None,
-        *args, **kwargs
+        **kwargs
     ) -> None:
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.mssql_conn_id = mssql_conn_id
         self.sql = sql
         self.parameters = parameters
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/airflow/providers/microsoft/winrm/operators/winrm.py
index 53e80eb..3ada2f3 100644
--- a/airflow/providers/microsoft/winrm/operators/winrm.py
+++ b/airflow/providers/microsoft/winrm/operators/winrm.py
@@ -57,9 +57,8 @@ class WinRMOperator(BaseOperator):
                  remote_host=None,
                  command=None,
                  timeout=10,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.winrm_hook = winrm_hook
         self.ssh_conn_id = ssh_conn_id
         self.remote_host = remote_host
diff --git a/airflow/providers/mysql/operators/mysql.py b/airflow/providers/mysql/operators/mysql.py
index 8dbf24f..ef8bd7a 100644
--- a/airflow/providers/mysql/operators/mysql.py
+++ b/airflow/providers/mysql/operators/mysql.py
@@ -54,8 +54,8 @@ class MySqlOperator(BaseOperator):
             parameters: Optional[Union[Mapping, Iterable]] = None,
             autocommit: bool = False,
             database: Optional[str] = None,
-            *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            **kwargs):
+        super().__init__(**kwargs)
         self.mysql_conn_id = mysql_conn_id
         self.sql = sql
         self.autocommit = autocommit
diff --git a/airflow/providers/opsgenie/operators/opsgenie_alert.py b/airflow/providers/opsgenie/operators/opsgenie_alert.py
index ce09574..06830e0 100644
--- a/airflow/providers/opsgenie/operators/opsgenie_alert.py
+++ b/airflow/providers/opsgenie/operators/opsgenie_alert.py
@@ -83,10 +83,9 @@ class OpsgenieAlertOperator(BaseOperator):
                  priority=None,
                  user=None,
                  note=None,
-                 *args,
                  **kwargs
                  ):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
 
         self.message = message
         self.opsgenie_conn_id = opsgenie_conn_id
diff --git a/airflow/providers/oracle/operators/oracle.py b/airflow/providers/oracle/operators/oracle.py
index 19bc143..54adbf4 100644
--- a/airflow/providers/oracle/operators/oracle.py
+++ b/airflow/providers/oracle/operators/oracle.py
@@ -51,8 +51,8 @@ class OracleOperator(BaseOperator):
             oracle_conn_id: str = 'oracle_default',
             parameters: Optional[Union[Mapping, Iterable]] = None,
             autocommit: bool = False,
-            *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+            **kwargs) -> None:
+        super().__init__(**kwargs)
         self.oracle_conn_id = oracle_conn_id
         self.sql = sql
         self.autocommit = autocommit
diff --git a/airflow/providers/papermill/operators/papermill.py b/airflow/providers/papermill/operators/papermill.py
index b34e5c8..3ffa77e 100644
--- a/airflow/providers/papermill/operators/papermill.py
+++ b/airflow/providers/papermill/operators/papermill.py
@@ -54,8 +54,8 @@ class PapermillOperator(BaseOperator):
                  input_nb: Optional[str] = None,
                  output_nb: Optional[str] = None,
                  parameters: Optional[Dict] = None,
-                 *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs) -> None:
+        super().__init__(**kwargs)
 
         if input_nb:
             self.inlets.append(NoteBook(url=input_nb,
diff --git a/airflow/providers/postgres/operators/postgres.py b/airflow/providers/postgres/operators/postgres.py
index 5304f74..bf27458 100644
--- a/airflow/providers/postgres/operators/postgres.py
+++ b/airflow/providers/postgres/operators/postgres.py
@@ -53,8 +53,8 @@ class PostgresOperator(BaseOperator):
             autocommit: bool = False,
             parameters: Optional[Union[Mapping, Iterable]] = None,
             database: Optional[str] = None,
-            *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+            **kwargs) -> None:
+        super().__init__(**kwargs)
         self.sql = sql
         self.postgres_conn_id = postgres_conn_id
         self.autocommit = autocommit
diff --git a/airflow/providers/qubole/operators/qubole.py b/airflow/providers/qubole/operators/qubole.py
index bae58c6..84afd5e 100644
--- a/airflow/providers/qubole/operators/qubole.py
+++ b/airflow/providers/qubole/operators/qubole.py
@@ -190,7 +190,7 @@ class QuboleOperator(BaseOperator):
         self.kwargs['qubole_conn_id'] = qubole_conn_id
         self.hook = None
         filtered_base_kwargs = self._get_filtered_args(kwargs)
-        super().__init__(*args, **filtered_base_kwargs)
+        super().__init__(**filtered_base_kwargs)
 
         if self.on_failure_callback is None:
             self.on_failure_callback = QuboleHook.handle_failure_retry
diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py
index 0a59f23..5ed840e 100644
--- a/airflow/providers/qubole/operators/qubole_check.py
+++ b/airflow/providers/qubole/operators/qubole_check.py
@@ -81,9 +81,9 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator):
     ui_fgcolor = '#000'
 
     @apply_defaults
-    def __init__(self, qubole_conn_id="qubole_default", *args, **kwargs):
+    def __init__(self, qubole_conn_id="qubole_default", **kwargs):
         sql = get_sql_from_qbol_cmd(kwargs)
-        super().__init__(qubole_conn_id=qubole_conn_id, sql=sql, *args, **kwargs)
+        super().__init__(qubole_conn_id=qubole_conn_id, sql=sql, **kwargs)
         self.on_failure_callback = QuboleCheckHook.handle_failure_retry
         self.on_retry_callback = QuboleCheckHook.handle_failure_retry
 
@@ -163,13 +163,13 @@ class QuboleValueCheckOperator(ValueCheckOperator, QuboleOperator):
 
     @apply_defaults
     def __init__(self, pass_value, tolerance=None, results_parser_callable=None,
-                 qubole_conn_id="qubole_default", *args, **kwargs):
+                 qubole_conn_id="qubole_default", **kwargs):
 
         sql = get_sql_from_qbol_cmd(kwargs)
         super().__init__(
             qubole_conn_id=qubole_conn_id,
             sql=sql, pass_value=pass_value, tolerance=tolerance,
-            *args, **kwargs)
+            **kwargs)
 
         self.results_parser_callable = results_parser_callable
         self.on_failure_callback = QuboleCheckHook.handle_failure_retry
diff --git a/airflow/providers/redis/operators/redis_publish.py b/airflow/providers/redis/operators/redis_publish.py
index 8c357af..eca59fa 100644
--- a/airflow/providers/redis/operators/redis_publish.py
+++ b/airflow/providers/redis/operators/redis_publish.py
@@ -43,9 +43,9 @@ class RedisPublishOperator(BaseOperator):
             channel: str,
             message: str,
             redis_conn_id: str = 'redis_default',
-            *args, **kwargs) -> None:
+            **kwargs) -> None:
 
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.redis_conn_id = redis_conn_id
         self.channel = channel
         self.message = message
diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
index 798a424..53bc2bb 100644
--- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
+++ b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
@@ -47,9 +47,8 @@ class TableauRefreshWorkbookOperator(BaseOperator):
                  site_id: Optional[str] = None,
                  blocking: bool = True,
                  tableau_conn_id: str = 'tableau_default',
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.workbook_name = workbook_name
         self.site_id = site_id
         self.blocking = blocking
diff --git a/airflow/providers/segment/operators/segment_track_event.py b/airflow/providers/segment/operators/segment_track_event.py
index 4d3db18..12adf17 100644
--- a/airflow/providers/segment/operators/segment_track_event.py
+++ b/airflow/providers/segment/operators/segment_track_event.py
@@ -47,9 +47,8 @@ class SegmentTrackEventOperator(BaseOperator):
                  properties=None,
                  segment_conn_id='segment_default',
                  segment_debug_mode=False,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.user_id = user_id
         self.event = event
         properties = properties or {}
diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py
index 6296ce6..d01fa43 100644
--- a/airflow/providers/sftp/operators/sftp.py
+++ b/airflow/providers/sftp/operators/sftp.py
@@ -90,9 +90,8 @@ class SFTPOperator(BaseOperator):
                  operation=SFTPOperation.PUT,
                  confirm=True,
                  create_intermediate_dirs=False,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.ssh_hook = ssh_hook
         self.ssh_conn_id = ssh_conn_id
         self.remote_host = remote_host
diff --git a/airflow/providers/singularity/operators/singularity.py b/airflow/providers/singularity/operators/singularity.py
index 740fbc8..122091a 100644
--- a/airflow/providers/singularity/operators/singularity.py
+++ b/airflow/providers/singularity/operators/singularity.py
@@ -76,10 +76,9 @@ class SingularityOperator(BaseOperator):
             volumes: Optional[List[str]] = None,
             options: Optional[List[str]] = None,
             auto_remove: Optional[bool] = False,
-            *args,
             **kwargs) -> None:
 
-        super(SingularityOperator, self).__init__(*args, **kwargs)
+        super(SingularityOperator, self).__init__(**kwargs)
         self.auto_remove = auto_remove
         self.command = command
         self.start_command = start_command
diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py
index 5e457a5..4f0d8cb 100644
--- a/airflow/providers/slack/operators/slack_webhook.py
+++ b/airflow/providers/slack/operators/slack_webhook.py
@@ -75,10 +75,8 @@ class SlackWebhookOperator(SimpleHttpOperator):
                  icon_url=None,
                  link_names=False,
                  proxy=None,
-                 *args,
                  **kwargs):
         super().__init__(endpoint=webhook_token,
-                         *args,
                          **kwargs)
         self.http_conn_id = http_conn_id
         self.webhook_token = webhook_token
diff --git a/airflow/providers/snowflake/operators/snowflake.py b/airflow/providers/snowflake/operators/snowflake.py
index f8410e6..470000b 100644
--- a/airflow/providers/snowflake/operators/snowflake.py
+++ b/airflow/providers/snowflake/operators/snowflake.py
@@ -65,8 +65,8 @@ class SnowflakeOperator(BaseOperator):
     def __init__(
             self, sql, snowflake_conn_id='snowflake_default', parameters=None,
             autocommit=True, warehouse=None, database=None, role=None,
-            schema=None, authenticator=None, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+            schema=None, authenticator=None, **kwargs):
+        super().__init__(**kwargs)
         self.snowflake_conn_id = snowflake_conn_id
         self.sql = sql
         self.autocommit = autocommit
diff --git a/airflow/providers/sqlite/operators/sqlite.py b/airflow/providers/sqlite/operators/sqlite.py
index 1310b72..e3066f3 100644
--- a/airflow/providers/sqlite/operators/sqlite.py
+++ b/airflow/providers/sqlite/operators/sqlite.py
@@ -45,8 +45,8 @@ class SqliteOperator(BaseOperator):
             sql: str,
             sqlite_conn_id: str = 'sqlite_default',
             parameters: Optional[Union[Mapping, Iterable]] = None,
-            *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+            **kwargs) -> None:
+        super().__init__(**kwargs)
         self.sqlite_conn_id = sqlite_conn_id
         self.sql = sql
         self.parameters = parameters or []
diff --git a/airflow/providers/ssh/operators/ssh.py b/airflow/providers/ssh/operators/ssh.py
index 04c996f..53f10b4 100644
--- a/airflow/providers/ssh/operators/ssh.py
+++ b/airflow/providers/ssh/operators/ssh.py
@@ -66,9 +66,8 @@ class SSHOperator(BaseOperator):
                  timeout=10,
                  environment=None,
                  get_pty=False,
-                 *args,
                  **kwargs):
-        super().__init__(*args, **kwargs)
+        super().__init__(**kwargs)
         self.ssh_hook = ssh_hook
         self.ssh_conn_id = ssh_conn_id
         self.remote_host = remote_host
diff --git a/airflow/providers/vertica/operators/vertica.py b/airflow/providers/vertica/operators/vertica.py
index a967950..e72aded 100644
--- a/airflow/providers/vertica/operators/vertica.py
+++ b/airflow/providers/vertica/operators/vertica.py
@@ -41,8 +41,8 @@ class VerticaOperator(BaseOperator):
     @apply_defaults
     def __init__(self, sql: Union[str, List[str]],
                  vertica_conn_id: str = 'vertica_default',
-                 *args: Any, **kwargs: Any) -> None:
-        super().__init__(*args, **kwargs)
+                 **kwargs: Any) -> None:
+        super().__init__(**kwargs)
         self.vertica_conn_id = vertica_conn_id
         self.sql = sql
 
diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/airflow/providers/yandex/operators/yandexcloud_dataproc.py
index e72a516..cdcce02 100644
--- a/airflow/providers/yandex/operators/yandexcloud_dataproc.py
+++ b/airflow/providers/yandex/operators/yandexcloud_dataproc.py
@@ -105,9 +105,8 @@ class DataprocCreateClusterOperator(BaseOperator):
                  computenode_disk_type: str = 'network-ssd',
                  computenode_count: int = 0,
                  connection_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.folder_id = folder_id
         self.connection_id = connection_id
         self.cluster_name = cluster_name
@@ -178,9 +177,8 @@ class DataprocDeleteClusterOperator(BaseOperator):
     def __init__(self,
                  connection_id: Optional[str] = None,
                  cluster_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.connection_id = connection_id
         self.cluster_id = cluster_id
         self.hook = None
@@ -231,9 +229,8 @@ class DataprocCreateHiveJobOperator(BaseOperator):
                  name: str = 'Hive job',
                  cluster_id: Optional[str] = None,
                  connection_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.query = query
         self.query_file_uri = query_file_uri
         self.script_variables = script_variables
@@ -305,9 +302,8 @@ class DataprocCreateMapReduceJobOperator(BaseOperator):
                  name: str = 'Mapreduce job',
                  cluster_id: Optional[str] = None,
                  connection_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.main_class = main_class
         self.main_jar_file_uri = main_jar_file_uri
         self.jar_file_uris = jar_file_uris
@@ -382,9 +378,8 @@ class DataprocCreateSparkJobOperator(BaseOperator):
                  name: str = 'Spark job',
                  cluster_id: Optional[str] = None,
                  connection_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.main_class = main_class
         self.main_jar_file_uri = main_jar_file_uri
         self.jar_file_uris = jar_file_uris
@@ -459,9 +454,8 @@ class DataprocCreatePysparkJobOperator(BaseOperator):
                  name: str = 'Pyspark job',
                  cluster_id: Optional[str] = None,
                  connection_id: Optional[str] = None,
-                 *arguments,
                  **kwargs):
-        super().__init__(*arguments, **kwargs)
+        super().__init__(**kwargs)
         self.main_python_file_uri = main_python_file_uri
         self.python_file_uris = python_file_uris
         self.jar_file_uris = jar_file_uris