Posted to commits@airflow.apache.org by po...@apache.org on 2021/03/03 09:32:24 UTC

[airflow] branch v2-0-test updated (e61cf07 -> 0d78fe1)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a change to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


 discard e61cf07  fixup! Switch to f-strings using flynt. (#13732)
 discard 8482e56  Add Azure Data Factory hook (#11015)
 discard f2e6710  Add Tableau provider separate from Salesforce Provider (#14030)
 discard f8bb300  Pin moto to <2 (#14433)
 discard 99714d4  Remove testfixtures module that is only used once (#14318)
 discard 9c65297  Limits Sphinx to <3.5.0 (#14238)
 discard f4602bd  Remove reinstalling azure-storage steps from CI / Breeze (#14102)
 discard 86464f9  Update to Pytest 6.0 (#14065)
 discard cf43aa1  Support google-cloud-logging` >=2.0.0 (#13801)
 discard 76c6843  Support google-cloud-monitoring>=2.0.0 (#13769)
 discard a68c296  Refactor DataprocOperators to support google-cloud-dataproc 2.0 (#13256)
 discard 65eebe5  Support google-cloud-tasks>=2.0.0 (#13347)
 discard b491668  Support google-cloud-automl >=2.1.0 (#13505)
 discard c304840  Support google-cloud-datacatalog>=3.0.0 (#13534)
 discard bc04985  Salesforce provider requires tableau (#13593)
 discard a85ffbd  Support google-cloud-bigquery-datatransfer>=3.0.0 (#13337)
 discard 02d4647  Add timeout option to gcs hook methods. (#13156)
 discard ad61070  Support google-cloud-redis>=2.0.0 (#13117)
 discard 817ad37  Support google-cloud-pubsub>=2.0.0 (#13127)
 discard dd62000  Update compatibility with google-cloud-kms>=2.0 (#13124)
 discard d3ac484  Support google-cloud-datacatalog>=1.0.0 (#13097)
 discard 9118420  Update compatibility with google-cloud-os-login>=2.0.0 (#13126)
 discard b106111  Add Google Cloud Workflows Operators (#13366)
 discard 6bba4e1  Upgrade slack_sdk to v3 (#13745)
 discard fd4c91c  Add Apache Beam operators (#12814)
 discard ef800f7  Fix grammar in production-deployment.rst (#14386)
 discard db77c3e  Minor doc fixes (#14547)
 discard faf47cd  Add Neo4j hook and operator (#13324)
 discard e3e6341  Add more tips about health checks (#14537)
 discard cf1a66f  Add docs about Celery monitoring (#14533)
 discard 2418915  Fix misleading statement on sqlite (#14317)
 discard 4c0463b  Correct PostgreSQL password in doc example code (#14256)
 discard 36f0cb5  Add better description and guidance in case of sqlite version mismatch (#14209)
 discard d8f7781  Update documents for using MySQL (#14174)
 discard c95f409  Document configuration for email backend credentials. (#14006)
 discard d769466  Use DAG context manager in examples (#13297)
 discard 99d1e18  Implement provider versioning tools (#13767)
 discard 5b88b0e  Fix breeze redirect on macOS (#14506)
 discard 8548b03  Log all breeze output to a file automatically (#14470)
 discard 35c26b9  Switch to f-strings using flynt. (#13732)
 discard ddc619d  Disable health checks for ad-hoc containers (#14536)
 discard 598ba31  Add CLI check for scheduler (#14519)
 discard 90ed184  Add health-check for celery worker (#14522)
     new a0c14a3  Disable health checks for ad-hoc containers (#14536)
     new 9588912  Switch to f-strings using flynt. (#13732)
     new 656a467  Log all breeze output to a file automatically (#14470)
     new 3c39b5d  Fix breeze redirect on macOS (#14506)
     new 5896cd0  Implement provider versioning tools (#13767)
     new 7a6d568  Use DAG context manager in examples (#13297)
     new 400e078  Update documents for using MySQL (#14174)
     new f1ae4a4  Add better description and guidance in case of sqlite version mismatch (#14209)
     new c8012cb  Correct PostgreSQL password in doc example code (#14256)
     new 25e482e  Fix misleading statement on sqlite (#14317)
     new 915f3ff  Add more tips about health checks (#14537)
     new 3072feb  Add Neo4j hook and operator (#13324)
     new 35375f5  Minor doc fixes (#14547)
     new aa50ae4  Fix grammar in production-deployment.rst (#14386)
     new f394df3  Add Apache Beam operators (#12814)
     new 36a7383  Upgrade slack_sdk to v3 (#13745)
     new 3f5cc0c  Add Google Cloud Workflows Operators (#13366)
     new e2eceb5  Update compatibility with google-cloud-os-login>=2.0.0 (#13126)
     new 1939446  Support google-cloud-datacatalog>=1.0.0 (#13097)
     new c2c85dd  Update compatibility with google-cloud-kms>=2.0 (#13124)
     new 8faa1bd  Support google-cloud-pubsub>=2.0.0 (#13127)
     new ce6c631  Support google-cloud-redis>=2.0.0 (#13117)
     new 7c5ce8b  Add timeout option to gcs hook methods. (#13156)
     new ce5c00f  Support google-cloud-bigquery-datatransfer>=3.0.0 (#13337)
     new 0150960  Salesforce provider requires tableau (#13593)
     new 02cb5e1  Support google-cloud-datacatalog>=3.0.0 (#13534)
     new a3b6e47  Support google-cloud-automl >=2.1.0 (#13505)
     new c518035  Support google-cloud-tasks>=2.0.0 (#13347)
     new 30bad81  Refactor DataprocOperators to support google-cloud-dataproc 2.0 (#13256)
     new acfe4ae  Support google-cloud-monitoring>=2.0.0 (#13769)
     new be04073  Support google-cloud-logging` >=2.0.0 (#13801)
     new df99938  Update to Pytest 6.0 (#14065)
     new b31484f  Remove reinstalling azure-storage steps from CI / Breeze (#14102)
     new 68b2d9e  Limits Sphinx to <3.5.0 (#14238)
     new ad50ff2  Remove testfixtures module that is only used once (#14318)
     new 80d2644  Pin moto to <2 (#14433)
     new d3359a0  Add Tableau provider separate from Salesforce Provider (#14030)
     new 9ecee99  Add Azure Data Factory hook (#11015)
     new b05faa4  fixup! Switch to f-strings using flynt. (#13732)
     new 1bb177d  Fix failing docs build on Master (#14465)
     new 0d78fe1  fixup! Add Neo4j hook and operator (#13324)

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (e61cf07)
            \
             N -- N -- N   refs/heads/v2-0-test (0d78fe1)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 41 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 airflow/cli/cli_parser.py                          |  76 ++----------
 airflow/cli/commands/jobs_command.py               |  53 ---------
 airflow/providers/amazon/aws/utils/emailer.py      |  49 --------
 docs/apache-airflow-providers-neo4j/commits.rst    |  10 +-
 docs/apache-airflow-providers-neo4j/index.rst      |  76 ++++++++++++
 docs/apache-airflow-providers-tableau/index.rst    |   6 -
 docs/apache-airflow/howto/email-config.rst         |  34 +-----
 .../logging-monitoring/check-health.rst            |  74 ------------
 docs/apache-airflow/start/docker-compose.yaml      |  12 --
 docs/apache-airflow/start/docker.rst               |   4 +-
 tests/cli/commands/test_jobs_command.py            | 128 ---------------------
 tests/cli/test_cli_parser.py                       |   7 +-
 12 files changed, 99 insertions(+), 430 deletions(-)
 delete mode 100644 airflow/cli/commands/jobs_command.py
 delete mode 100644 airflow/providers/amazon/aws/utils/emailer.py
 delete mode 100644 tests/cli/commands/test_jobs_command.py


[airflow] 22/41: Support google-cloud-redis>=2.0.0 (#13117)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ce6c631ec0e4b283f949574e8e00478ec7450b8a
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Dec 22 16:25:04 2020 +0100

    Support google-cloud-redis>=2.0.0 (#13117)
    
    (cherry picked from commit 0b626c8042b304a52d6c481fa6eb689d655f33d3)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |  64 +++++++++
 .../example_dags/example_cloud_memorystore.py      |   4 +-
 .../google/cloud/hooks/cloud_memorystore.py        | 144 ++++++++++++++-------
 .../google/cloud/operators/cloud_memorystore.py    |  11 +-
 setup.py                                           |   2 +-
 .../google/cloud/hooks/test_cloud_memorystore.py   |  57 ++++----
 .../cloud/operators/test_cloud_memorystore.py      |   4 +-
 7 files changed, 208 insertions(+), 78 deletions(-)
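
For background on the diff below (this sketch is not part of the commit), the hook code is migrated to the google-cloud-redis 2.x calling style, where resource names are plain strings and call arguments are wrapped in a single `request` mapping. A minimal, hedged sketch of that style follows; the project, location and instance values are placeholder assumptions, not values taken from the diff:

```python
# Hedged sketch of the google-cloud-redis>=2.0.0 call style this commit migrates to.
# The project/location/instance values are placeholders, not taken from the diff.
from google.cloud.redis_v1 import CloudRedisClient

project_id, location, instance_id = "my-project", "europe-west1", "my-redis"

client = CloudRedisClient()
# Resource names are built as plain f-strings in the migrated hook code.
name = f"projects/{project_id}/locations/{location}/instances/{instance_id}"
# Call arguments are passed in a single `request` mapping rather than separate keyword arguments.
instance = client.get_instance(request={"name": name})
print(instance.name)
```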

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
new file mode 100644
index 0000000..b54b240
--- /dev/null
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -0,0 +1,64 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+# Migration Guide
+
+## 2.0.0
+
+### Update ``google-cloud-*`` libraries
+
+This release of the provider package contains third-party library updates, which may require you to update your DAG files or custom hooks and operators if you were using objects from those libraries. Updating these libraries is necessary to use the new features made available by their new versions and to obtain bug fixes that are only available there.
+
+Details are covered in each library's UPGRADING.md file, but a few points deserve particular attention.
+
+| Library name | Previous constraints | Current constraints | Upgrade guide |
+| --- | --- | --- | --- |
+| [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=1.0.0,<2.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
+| [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
+| [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
+| [``google-cloud-kms``](https://pypi.org/project/google-cloud-kms/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
+
+
+### The field names use the snake_case convention
+
+If your DAG reads fields from an object of one of the above-mentioned libraries passed via XCom, you need to update the naming convention of the fields that are read. Previously the fields used the camelCase convention; now the snake_case convention is used.
+
+**Before:**
+
+```python
+set_acl_permission = GCSBucketCreateAclEntryOperator(
+    task_id="gcs-set-acl-permission",
+    bucket=BUCKET_NAME,
+    entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
+    ".split(':', 2)[1] }}",
+    role="OWNER",
+)
+```
+
+**After:**
+
+```python
+set_acl_permission = GCSBucketCreateAclEntryOperator(
+    task_id="gcs-set-acl-permission",
+    bucket=BUCKET_NAME,
+    entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']"
+    ".split(':', 2)[1] }}",
+    role="OWNER",
+)
+```
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
index 441c165..acb50b4 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
@@ -22,7 +22,7 @@ import os
 from urllib.parse import urlparse
 
 from google.cloud.memcache_v1beta2.types import cloud_memcache
-from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest, Instance
+from google.cloud.redis_v1 import FailoverInstanceRequest, Instance
 
 from airflow import models
 from airflow.operators.bash import BashOperator
@@ -161,7 +161,7 @@ with models.DAG(
     set_acl_permission = GCSBucketCreateAclEntryOperator(
         task_id="gcs-set-acl-permission",
         bucket=BUCKET_NAME,
-        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']"
         ".split(':', 2)[1] }}",
         role="OWNER",
     )
diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/airflow/providers/google/cloud/hooks/cloud_memorystore.py
index bfc01f9..caf1cd6 100644
--- a/airflow/providers/google/cloud/hooks/cloud_memorystore.py
+++ b/airflow/providers/google/cloud/hooks/cloud_memorystore.py
@@ -23,10 +23,14 @@ from google.api_core.exceptions import NotFound
 from google.api_core.retry import Retry
 from google.cloud.memcache_v1beta2 import CloudMemcacheClient
 from google.cloud.memcache_v1beta2.types import cloud_memcache
-from google.cloud.redis_v1 import CloudRedisClient
-from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest
-from google.cloud.redis_v1.types import FieldMask, InputConfig, Instance, OutputConfig
-from google.protobuf.json_format import ParseDict
+from google.cloud.redis_v1 import (
+    CloudRedisClient,
+    FailoverInstanceRequest,
+    InputConfig,
+    Instance,
+    OutputConfig,
+)
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow import version
 from airflow.exceptions import AirflowException
@@ -70,7 +74,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         )
         self._client: Optional[CloudRedisClient] = None
 
-    def get_conn(self):
+    def get_conn(self) -> CloudRedisClient:
         """Retrieves client library object that allow access to Cloud Memorystore service."""
         if not self._client:
             self._client = CloudRedisClient(credentials=self._get_credentials())
@@ -143,35 +147,36 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = CloudRedisClient.location_path(project_id, location)
-        instance_name = CloudRedisClient.instance_path(project_id, location, instance_id)
+        if isinstance(instance, dict):
+            instance = Instance(**instance)
+        elif not isinstance(instance, Instance):
+            raise AirflowException("instance is not instance of Instance type or python dict")
+
+        parent = f"projects/{project_id}/locations/{location}"
+        instance_name = f"projects/{project_id}/locations/{location}/instances/{instance_id}"
         try:
+            self.log.info("Fetching instance: %s", instance_name)
             instance = client.get_instance(
-                name=instance_name, retry=retry, timeout=timeout, metadata=metadata
+                request={'name': instance_name}, retry=retry, timeout=timeout, metadata=metadata or ()
             )
             self.log.info("Instance exists. Skipping creation.")
             return instance
         except NotFound:
             self.log.info("Instance not exists.")
 
-        if isinstance(instance, dict):
-            instance = ParseDict(instance, Instance())
-        elif not isinstance(instance, Instance):
-            raise AirflowException("instance is not instance of Instance type or python dict")
-
         self._append_label(instance, "airflow-version", "v" + version.version)
 
         result = client.create_instance(
-            parent=parent,
-            instance_id=instance_id,
-            instance=instance,
+            request={'parent': parent, 'instance_id': instance_id, 'instance': instance},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance created.")
-        return client.get_instance(name=instance_name, retry=retry, timeout=timeout, metadata=metadata)
+        return client.get_instance(
+            request={'name': instance_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def delete_instance(
@@ -203,15 +208,25 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = CloudRedisClient.instance_path(project_id, location, instance)
+        name = f"projects/{project_id}/locations/{location}/instances/{instance}"
         self.log.info("Fetching Instance: %s", name)
-        instance = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        instance = client.get_instance(
+            request={'name': name},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
 
         if not instance:
             return
 
         self.log.info("Deleting Instance: %s", name)
-        result = client.delete_instance(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        result = client.delete_instance(
+            request={'name': name},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
         result.result()
         self.log.info("Instance deleted: %s", name)
 
@@ -253,10 +268,13 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = CloudRedisClient.instance_path(project_id, location, instance)
+        name = f"projects/{project_id}/locations/{location}/instances/{instance}"
         self.log.info("Exporting Instance: %s", name)
         result = client.export_instance(
-            name=name, output_config=output_config, retry=retry, timeout=timeout, metadata=metadata
+            request={'name': name, 'output_config': output_config},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance exported: %s", name)
@@ -297,15 +315,14 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = CloudRedisClient.instance_path(project_id, location, instance)
+        name = f"projects/{project_id}/locations/{location}/instances/{instance}"
         self.log.info("Failovering Instance: %s", name)
 
         result = client.failover_instance(
-            name=name,
-            data_protection_mode=data_protection_mode,
+            request={'name': name, 'data_protection_mode': data_protection_mode},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance failovered: %s", name)
@@ -340,8 +357,13 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = CloudRedisClient.instance_path(project_id, location, instance)
-        result = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/locations/{location}/instances/{instance}"
+        result = client.get_instance(
+            request={'name': name},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
         self.log.info("Fetched Instance: %s", name)
         return result
 
@@ -384,10 +406,13 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = CloudRedisClient.instance_path(project_id, location, instance)
+        name = f"projects/{project_id}/locations/{location}/instances/{instance}"
         self.log.info("Importing Instance: %s", name)
         result = client.import_instance(
-            name=name, input_config=input_config, retry=retry, timeout=timeout, metadata=metadata
+            request={'name': name, 'input_config': input_config},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance imported: %s", name)
@@ -428,9 +453,12 @@ class CloudMemorystoreHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = CloudRedisClient.location_path(project_id, location)
+        parent = f"projects/{project_id}/locations/{location}"
         result = client.list_instances(
-            parent=parent, page_size=page_size, retry=retry, timeout=timeout, metadata=metadata
+            request={'parent': parent, 'page_size': page_size},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         self.log.info("Fetched instances")
         return result
@@ -485,17 +513,20 @@ class CloudMemorystoreHook(GoogleBaseHook):
         client = self.get_conn()
 
         if isinstance(instance, dict):
-            instance = ParseDict(instance, Instance())
+            instance = Instance(**instance)
         elif not isinstance(instance, Instance):
             raise AirflowException("instance is not instance of Instance type or python dict")
 
         if location and instance_id:
-            name = CloudRedisClient.instance_path(project_id, location, instance_id)
+            name = f"projects/{project_id}/locations/{location}/instances/{instance_id}"
             instance.name = name
 
         self.log.info("Updating instances: %s", instance.name)
         result = client.update_instance(
-            update_mask=update_mask, instance=instance, retry=retry, timeout=timeout, metadata=metadata
+            request={'update_mask': update_mask, 'instance': instance},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance updated: %s", instance.name)
@@ -610,7 +641,12 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
 
         self.log.info("Applying update to instance: %s", instance_id)
         result = client.apply_parameters(
-            name=name, node_ids=node_ids, apply_all=apply_all, retry=retry, timeout=timeout, metadata=metadata
+            name=name,
+            node_ids=node_ids,
+            apply_all=apply_all,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance updated: %s", instance_id)
@@ -688,11 +724,16 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
             resource=instance,
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Instance created.")
-        return client.get_instance(name=instance_name, retry=retry, timeout=timeout, metadata=metadata)
+        return client.get_instance(
+            name=instance_name,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def delete_instance(
@@ -727,13 +768,23 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         metadata = metadata or ()
         name = CloudMemcacheClient.instance_path(project_id, location, instance)
         self.log.info("Fetching Instance: %s", name)
-        instance = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        instance = client.get_instance(
+            name=name,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
 
         if not instance:
             return
 
         self.log.info("Deleting Instance: %s", name)
-        result = client.delete_instance(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        result = client.delete_instance(
+            name=name,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
         result.result()
         self.log.info("Instance deleted: %s", name)
 
@@ -808,7 +859,12 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         parent = path_template.expand(
             "projects/{project}/locations/{location}", project=project_id, location=location
         )
-        result = client.list_instances(parent=parent, retry=retry, timeout=timeout, metadata=metadata)
+        result = client.list_instances(
+            parent=parent,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
+        )
         self.log.info("Fetched instances")
         return result
 
@@ -871,7 +927,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
 
         self.log.info("Updating instances: %s", instance.name)
         result = client.update_instance(
-            update_mask=update_mask, resource=instance, retry=retry, timeout=timeout, metadata=metadata
+            update_mask=update_mask, resource=instance, retry=retry, timeout=timeout, metadata=metadata or ()
         )
         result.result()
         self.log.info("Instance updated: %s", instance.name)
@@ -934,7 +990,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
             parameters=parameters,
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         result.result()
         self.log.info("Update staged for instance: %s", instance_id)
diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/airflow/providers/google/cloud/operators/cloud_memorystore.py
index 0ac2640..64a6251 100644
--- a/airflow/providers/google/cloud/operators/cloud_memorystore.py
+++ b/airflow/providers/google/cloud/operators/cloud_memorystore.py
@@ -20,9 +20,8 @@ from typing import Dict, Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry
 from google.cloud.memcache_v1beta2.types import cloud_memcache
-from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest
-from google.cloud.redis_v1.types import FieldMask, InputConfig, Instance, OutputConfig
-from google.protobuf.json_format import MessageToDict
+from google.cloud.redis_v1 import FailoverInstanceRequest, InputConfig, Instance, OutputConfig
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.cloud_memorystore import (
@@ -134,7 +133,7 @@ class CloudMemorystoreCreateInstanceOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return Instance.to_dict(result)
 
 
 class CloudMemorystoreDeleteInstanceOperator(BaseOperator):
@@ -492,7 +491,7 @@ class CloudMemorystoreGetInstanceOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return Instance.to_dict(result)
 
 
 class CloudMemorystoreImportOperator(BaseOperator):
@@ -677,7 +676,7 @@ class CloudMemorystoreListInstancesOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        instances = [MessageToDict(a) for a in result]
+        instances = [Instance.to_dict(a) for a in result]
         return instances
 
 
diff --git a/setup.py b/setup.py
index ff9fd71..ae18e57 100644
--- a/setup.py
+++ b/setup.py
@@ -297,7 +297,7 @@ google = [
     'google-cloud-monitoring>=0.34.0,<2.0.0',
     'google-cloud-os-login>=2.0.0,<3.0.0',
     'google-cloud-pubsub>=2.0.0,<3.0.0',
-    'google-cloud-redis>=0.3.0,<2.0.0',
+    'google-cloud-redis>=2.0.0,<3.0.0',
     'google-cloud-secret-manager>=0.2.0,<2.0.0',
     'google-cloud-spanner>=1.10.0,<2.0.0',
     'google-cloud-speech>=0.36.3,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py
index 40de3b8..9e6f442 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py
+++ b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py
@@ -85,7 +85,10 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_instance.assert_called_once_with(
-            name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID),
+            retry=TEST_RETRY,
+            timeout=TEST_TIMEOUT,
+            metadata=TEST_METADATA,
         )
         assert Instance(name=TEST_NAME) == result
 
@@ -116,13 +119,15 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             ]
         )
         mock_get_conn.return_value.create_instance.assert_called_once_with(
-            instance=Instance(
-                name=TEST_NAME,
-                labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")},
+            request=dict(
+                parent=TEST_PARENT_DEFAULT_PROJECT_ID,
+                instance=Instance(
+                    name=TEST_NAME,
+                    labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")},
+                ),
+                instance_id=TEST_INSTANCE_ID,
             ),
-            instance_id=TEST_INSTANCE_ID,
             metadata=TEST_METADATA,
-            parent=TEST_PARENT_DEFAULT_PROJECT_ID,
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
         )
@@ -143,7 +148,10 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_instance.assert_called_once_with(
-            name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID),
+            retry=TEST_RETRY,
+            timeout=TEST_TIMEOUT,
+            metadata=TEST_METADATA,
         )
 
     @mock.patch(
@@ -161,7 +169,10 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_instance.assert_called_once_with(
-            name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID),
+            retry=TEST_RETRY,
+            timeout=TEST_TIMEOUT,
+            metadata=TEST_METADATA,
         )
 
     @mock.patch(
@@ -179,8 +190,7 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_instances.assert_called_once_with(
-            parent=TEST_PARENT_DEFAULT_PROJECT_ID,
-            page_size=TEST_PAGE_SIZE,
+            request=dict(parent=TEST_PARENT_DEFAULT_PROJECT_ID, page_size=TEST_PAGE_SIZE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -203,8 +213,7 @@ class TestCloudMemorystoreWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_instance.assert_called_once_with(
-            update_mask=TEST_UPDATE_MASK,
-            instance=Instance(name=TEST_NAME_DEFAULT_PROJECT_ID),
+            request=dict(update_mask=TEST_UPDATE_MASK, instance=Instance(name=TEST_NAME_DEFAULT_PROJECT_ID)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -234,7 +243,7 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_instance.assert_called_once_with(
-            name="projects/test-project-id/locations/test-location/instances/test-instance-id",
+            request=dict(name="projects/test-project-id/locations/test-location/instances/test-instance-id"),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -275,13 +284,15 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
         )
 
         mock_get_conn.return_value.create_instance.assert_called_once_with(
-            instance=Instance(
-                name=TEST_NAME,
-                labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")},
+            request=dict(
+                parent=TEST_PARENT,
+                instance=Instance(
+                    name=TEST_NAME,
+                    labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")},
+                ),
+                instance_id=TEST_INSTANCE_ID,
             ),
-            instance_id=TEST_INSTANCE_ID,
             metadata=TEST_METADATA,
-            parent=TEST_PARENT,
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
         )
@@ -316,7 +327,7 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_instance.assert_called_once_with(
-            name=TEST_NAME, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(name=TEST_NAME), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
         )
 
     @mock.patch(
@@ -347,7 +358,7 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_instance.assert_called_once_with(
-            name=TEST_NAME, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(name=TEST_NAME), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
         )
 
     @mock.patch(
@@ -378,8 +389,7 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_instances.assert_called_once_with(
-            parent=TEST_PARENT,
-            page_size=TEST_PAGE_SIZE,
+            request=dict(parent=TEST_PARENT, page_size=TEST_PAGE_SIZE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -413,8 +423,7 @@ class TestCloudMemorystoreWithoutDefaultProjectIdHook(TestCase):
             project_id=TEST_PROJECT_ID,
         )
         mock_get_conn.return_value.update_instance.assert_called_once_with(
-            update_mask=TEST_UPDATE_MASK,
-            instance=Instance(name=TEST_NAME),
+            request=dict(update_mask={'paths': ['memory_size_gb']}, instance=Instance(name=TEST_NAME)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
diff --git a/tests/providers/google/cloud/operators/test_cloud_memorystore.py b/tests/providers/google/cloud/operators/test_cloud_memorystore.py
index 8ef60bd..6db8a3a 100644
--- a/tests/providers/google/cloud/operators/test_cloud_memorystore.py
+++ b/tests/providers/google/cloud/operators/test_cloud_memorystore.py
@@ -20,7 +20,7 @@ from unittest import TestCase, mock
 
 from google.api_core.retry import Retry
 from google.cloud.memcache_v1beta2.types import cloud_memcache
-from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest
+from google.cloud.redis_v1 import FailoverInstanceRequest
 from google.cloud.redis_v1.types import Instance
 
 from airflow.providers.google.cloud.operators.cloud_memorystore import (
@@ -78,6 +78,7 @@ class TestCloudMemorystoreCreateInstanceOperator(TestCase):
             gcp_conn_id=TEST_GCP_CONN_ID,
             impersonation_chain=TEST_IMPERSONATION_CHAIN,
         )
+        mock_hook.return_value.create_instance.return_value = Instance(name=TEST_NAME)
         task.execute(mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,
@@ -199,6 +200,7 @@ class TestCloudMemorystoreGetInstanceOperator(TestCase):
             gcp_conn_id=TEST_GCP_CONN_ID,
             impersonation_chain=TEST_IMPERSONATION_CHAIN,
         )
+        mock_hook.return_value.get_instance.return_value = Instance(name=TEST_NAME)
         task.execute(mock.MagicMock())
         mock_hook.assert_called_once_with(
             gcp_conn_id=TEST_GCP_CONN_ID,


[airflow] 11/41: Add more tips about health checks (#14537)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 915f3ffac02076c90cde4b8132757ef018e70036
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Mon Mar 1 23:04:06 2021 +0100

    Add more tips about health checks (#14537)
    
    * Add more tips about health checks
    
    * fixup! Add more tips about health checks
    
    * Apply suggestions from code review
    
    Co-authored-by: Kamil Breguła <ka...@apache.org>
    (cherry picked from commit b6a4804702711d473c5c55b7aeca2774cd342e22)
---
 docs/apache-airflow/production-deployment.rst | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/docs/apache-airflow/production-deployment.rst b/docs/apache-airflow/production-deployment.rst
index 34b12cb..439afe1 100644
--- a/docs/apache-airflow/production-deployment.rst
+++ b/docs/apache-airflow/production-deployment.rst
@@ -111,11 +111,7 @@ Airflow users occasionally report instances of the scheduler hanging without a t
 * `Scheduler gets stuck without a trace <https://github.com/apache/airflow/issues/7935>`_
 * `Scheduler stopping frequently <https://github.com/apache/airflow/issues/13243>`_
 
-Strategies for mitigation:
-
-* When running on kubernetes, use a ``livenessProbe`` on the scheduler deployment to fail if the scheduler
-  has not heartbeat in a while.
-  `Example: <https://github.com/apache/airflow/blob/190066cf201e5b0442bbbd6df74efecae523ee76/chart/templates/scheduler/scheduler-deployment.yaml#L118-L136>`_.
+To mitigate these issues, make sure you have a :doc:`health check </logging-monitoring/check-health>` set up that will detect when your scheduler has not sent a heartbeat in a while.
 
 .. _docker_image:
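
As a side note (not part of the commit above), the check-health document that the new line links to covers the webserver's /health endpoint. A minimal, hedged sketch of polling it, assuming a default local deployment listening on port 8080:

```python
# Hedged sketch: poll the Airflow webserver /health endpoint described in check-health.
# The URL assumes a default local deployment; adjust it for your environment.
import json
from urllib.request import urlopen

with urlopen("http://localhost:8080/health") as response:
    health = json.load(response)

# The endpoint reports metadatabase and scheduler status, including the latest scheduler heartbeat.
print(health["metadatabase"]["status"])
print(health["scheduler"]["status"], health["scheduler"]["latest_scheduler_heartbeat"])
```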
 


[airflow] 19/41: Support google-cloud-datacatalog>=1.0.0 (#13097)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 19394462de580996689d9942c9116e9e936723a1
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Dec 22 12:58:45 2020 +0100

    Support google-cloud-datacatalog>=1.0.0 (#13097)
    
    (cherry picked from commit 9a1d3820d6f1373df790da8751f25e723f9ce037)
---
 airflow/providers/google/cloud/hooks/datacatalog.py | 6 +++---
 setup.py                                            | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/airflow/providers/google/cloud/hooks/datacatalog.py
index 9c689c3..70b488d 100644
--- a/airflow/providers/google/cloud/hooks/datacatalog.py
+++ b/airflow/providers/google/cloud/hooks/datacatalog.py
@@ -537,7 +537,7 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.field_path(project_id, location, tag_template, field)
+        name = DataCatalogClient.tag_template_field_path(project_id, location, tag_template, field)
 
         self.log.info('Deleting a tag template field: name=%s', name)
         client.delete_tag_template_field(
@@ -860,7 +860,7 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.field_path(project_id, location, tag_template, field)
+        name = DataCatalogClient.tag_template_field_path(project_id, location, tag_template, field)
 
         self.log.info(
             'Renaming field: old_name=%s, new_tag_template_field_id=%s', name, new_tag_template_field_id
@@ -1246,7 +1246,7 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if project_id and location and tag_template and tag_template_field_id:
-            tag_template_field_name = DataCatalogClient.field_path(
+            tag_template_field_name = DataCatalogClient.tag_template_field_path(
                 project_id, location, tag_template, tag_template_field_id
             )
 
diff --git a/setup.py b/setup.py
index 0586bf3..63dd6d7 100644
--- a/setup.py
+++ b/setup.py
@@ -287,7 +287,7 @@ google = [
     'google-cloud-bigquery-datatransfer>=0.4.0,<2.0.0',
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
-    'google-cloud-datacatalog>=0.5.0, <0.8',  # TODO: we should migrate to 1.0 likely and add <2.0.0 then
+    'google-cloud-datacatalog>=1.0.0,<2.0.0',
     'google-cloud-dataproc>=1.0.1,<2.0.0',
     'google-cloud-dlp>=0.11.0,<2.0.0',
     'google-cloud-kms>=1.2.1,<2.0.0',


[airflow] 05/41: Implement provider versioning tools (#13767)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5896cd02ff0c5014ee17c6fe03ce05001b106c0b
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Feb 1 16:46:12 2021 +0100

    Implement provider versioning tools (#13767)
    
    This change implements per-provider versioning tools. The version of each
    provider is retrieved from its provider.yaml file (top-level version).
    Documentation is generated in the documentation folder rather than
    in sources and embedded in the provider's index. Backport providers
    remain as they were until we delete all the backport references in
    April 2021, and then the code can be simplified and the
    backport functionality removed.
    
    When generating multiple providers, only those whose version has no
    corresponding `providers-<PROVIDER>/<VERSION>` tag yet are
    generated. Other providers are skipped with warnings.

    Old documentation is removed and new CHANGELOG.rst files have been
    prepared for all providers to accommodate the new process
    (which is coming as a follow-up commit).
    
    Fixes: #13272, #13271, #13274, #13276, #13277, #13275, #13273
    (cherry picked from commit ac2f72c98dc0821b33721054588adbf2bb53bb0b)
---
 .github/workflows/build-images-workflow-run.yml    |    4 +-
 .github/workflows/ci.yml                           |   34 +-
 .github/workflows/scheduled_quarantined.yml        |    2 +-
 BREEZE.rst                                         |   98 +-
 CI.rst                                             |   11 +-
 CONTRIBUTORS_QUICK_START.rst                       |    4 +-
 .../index.rst => airflow/providers/CHANGELOG.rst   |   31 +-
 airflow/providers/README.md                        |   28 -
 .../providers/amazon/{aws => }/ADDITIONAL_INFO.md  |    0
 .../amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md |    2 +-
 .../amazon/BACKPORT_PROVIDER_CHANGES_2021.02.05.md |   41 +
 .../providers/amazon/BACKPORT_PROVIDER_README.md   |   43 +-
 .../providers/amazon/CHANGELOG.rst                 |   30 +-
 airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md |  217 ---
 airflow/providers/amazon/README.md                 |  483 -------
 .../providers/apache/cassandra/CHANGELOG.rst       |   31 +-
 .../apache/cassandra/PROVIDER_CHANGES_1.0.0.md     |   50 -
 airflow/providers/apache/cassandra/README.md       |  144 --
 .../providers/apache/druid/CHANGELOG.rst           |   31 +-
 .../apache/druid/PROVIDER_CHANGES_1.0.0.md         |   52 -
 airflow/providers/apache/druid/README.md           |  176 ---
 .../providers/apache/hdfs/CHANGELOG.rst            |   31 +-
 .../apache/hdfs/PROVIDER_CHANGES_1.0.0.md          |   53 -
 airflow/providers/apache/hdfs/README.md            |  150 ---
 .../providers/apache/hive/CHANGELOG.rst            |   31 +-
 .../apache/hive/PROVIDER_CHANGES_1.0.0.md          |   77 --
 airflow/providers/apache/hive/README.md            |  228 ----
 .../providers/apache/kylin/CHANGELOG.rst           |   31 +-
 .../apache/kylin/PROVIDER_CHANGES_1.0.0.md         |   35 -
 airflow/providers/apache/kylin/README.md           |  128 --
 .../providers/apache/livy/CHANGELOG.rst            |   31 +-
 .../apache/livy/PROVIDER_CHANGES_1.0.0.md          |   47 -
 airflow/providers/apache/livy/README.md            |  162 ---
 .../providers/apache/pig/CHANGELOG.rst             |   31 +-
 .../providers/apache/pig/PROVIDER_CHANGES_1.0.0.md |   51 -
 airflow/providers/apache/pig/README.md             |  137 --
 .../pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md  |    2 +-
 .../apache/pinot/BACKPORT_PROVIDER_README.md       |    2 +-
 .../providers/apache/pinot/CHANGELOG.rst           |   31 +-
 .../apache/pinot/PROVIDER_CHANGES_1.0.0.md         |   47 -
 airflow/providers/apache/pinot/README.md           |  128 --
 .../providers/apache/spark/CHANGELOG.rst           |   31 +-
 .../apache/spark/PROVIDER_CHANGES_1.0.0.md         |   64 -
 airflow/providers/apache/spark/README.md           |  161 ---
 .../providers/apache/sqoop/CHANGELOG.rst           |   31 +-
 .../apache/sqoop/PROVIDER_CHANGES_1.0.0.md         |   46 -
 airflow/providers/apache/sqoop/README.md           |  132 --
 .../providers/celery/CHANGELOG.rst                 |   31 +-
 airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md |   41 -
 airflow/providers/celery/README.md                 |  123 --
 .../providers/cloudant/CHANGELOG.rst               |   31 +-
 .../providers/cloudant/PROVIDER_CHANGES_1.0.0.md   |   42 -
 airflow/providers/cloudant/README.md               |  122 --
 .../BACKPORT_PROVIDER_CHANGES_2020.11.23.md        |    2 +-
 .../cncf/kubernetes/BACKPORT_PROVIDER_README.md    |    2 +-
 .../providers/cncf/kubernetes/CHANGELOG.rst        |   31 +-
 .../cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md      |  101 --
 airflow/providers/cncf/kubernetes/README.md        |  221 ----
 .../BACKPORT_PROVIDER_CHANGES_2020.11.23.md        |    2 +-
 .../databricks/BACKPORT_PROVIDER_README.md         |    2 +-
 .../providers/databricks/CHANGELOG.rst             |   31 +-
 .../providers/databricks/PROVIDER_CHANGES_1.0.0.md |   59 -
 airflow/providers/databricks/README.md             |  153 ---
 .../providers/datadog/CHANGELOG.rst                |   31 +-
 .../providers/datadog/PROVIDER_CHANGES_1.0.0.md    |   44 -
 airflow/providers/datadog/README.md                |  137 --
 .../providers/dingding/CHANGELOG.rst               |   31 +-
 .../providers/dingding/PROVIDER_CHANGES_1.0.0.md   |   55 -
 airflow/providers/dingding/README.md               |  157 ---
 .../providers/discord/CHANGELOG.rst                |   31 +-
 .../providers/discord/PROVIDER_CHANGES_1.0.0.md    |   45 -
 airflow/providers/discord/README.md                |  147 ---
 .../providers/docker/CHANGELOG.rst                 |   31 +-
 airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md |   66 -
 airflow/providers/docker/README.md                 |  160 ---
 .../providers/elasticsearch/CHANGELOG.rst          |   31 +-
 .../elasticsearch/PROVIDER_CHANGES_1.0.0.md        |   48 -
 airflow/providers/elasticsearch/README.md          |  130 --
 .../providers/exasol/CHANGELOG.rst                 |   31 +-
 airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md |   47 -
 airflow/providers/exasol/README.md                 |  140 --
 .../providers/facebook/CHANGELOG.rst               |   31 +-
 .../providers/facebook/PROVIDER_CHANGES_1.0.0.md   |   43 -
 airflow/providers/facebook/README.md               |  123 --
 .../providers/ftp/CHANGELOG.rst                    |   31 +-
 airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md    |   48 -
 airflow/providers/ftp/README.md                    |  136 --
 .../google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md |    2 +-
 .../providers/google/BACKPORT_PROVIDER_README.md   |    2 +-
 airflow/providers/google/CHANGELOG.rst             |   90 ++
 airflow/providers/google/PROVIDER_CHANGES_1.0.0.md |  377 ------
 airflow/providers/google/README.md                 |  967 --------------
 airflow/providers/google/provider.yaml             |    1 +
 .../providers/grpc/CHANGELOG.rst                   |   31 +-
 airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md   |   50 -
 airflow/providers/grpc/README.md                   |  145 --
 .../BACKPORT_PROVIDER_CHANGES_2020.11.23.md        |    2 +-
 .../hashicorp/BACKPORT_PROVIDER_README.md          |    2 +-
 .../providers/hashicorp/CHANGELOG.rst              |   31 +-
 .../providers/hashicorp/PROVIDER_CHANGES_1.0.0.md  |   56 -
 airflow/providers/hashicorp/README.md              |  165 ---
 .../http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md   |    2 +-
 airflow/providers/http/BACKPORT_PROVIDER_README.md |    2 +-
 .../providers/http/CHANGELOG.rst                   |   31 +-
 airflow/providers/http/PROVIDER_CHANGES_1.0.0.md   |   65 -
 airflow/providers/http/README.md                   |  164 ---
 .../providers/imap/CHANGELOG.rst                   |   31 +-
 airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md   |   50 -
 airflow/providers/imap/README.md                   |  136 --
 .../providers/jdbc/CHANGELOG.rst                   |   31 +-
 airflow/providers/jdbc/PROVIDER_CHANGES_1.0.0.md   |   52 -
 airflow/providers/jdbc/README.md                   |  145 --
 .../providers/jenkins/CHANGELOG.rst                |   31 +-
 .../providers/jenkins/PROVIDER_CHANGES_1.0.0.md    |   55 -
 airflow/providers/jenkins/README.md                |  148 ---
 .../providers/jira/CHANGELOG.rst                   |   31 +-
 airflow/providers/jira/PROVIDER_CHANGES_1.0.0.md   |   46 -
 airflow/providers/jira/README.md                   |  153 ---
 .../azure/BACKPORT_PROVIDER_CHANGES_2020.11.23.md  |    2 +-
 .../microsoft/azure/BACKPORT_PROVIDER_README.md    |    2 +-
 .../providers/microsoft/azure/CHANGELOG.rst        |   31 +-
 .../microsoft/azure/PROVIDER_CHANGES_1.0.0.md      |   84 --
 airflow/providers/microsoft/azure/README.md        |  285 ----
 .../mssql/BACKPORT_PROVIDER_CHANGES_2020.11.23.md  |    2 +-
 .../microsoft/mssql/BACKPORT_PROVIDER_README.md    |    2 +-
 .../providers/microsoft/mssql/CHANGELOG.rst        |   31 +-
 .../microsoft/mssql/PROVIDER_CHANGES_1.0.0.md      |   49 -
 airflow/providers/microsoft/mssql/README.md        |  158 ---
 .../winrm/BACKPORT_PROVIDER_CHANGES_2020.11.23.md  |    2 +-
 .../microsoft/winrm/BACKPORT_PROVIDER_README.md    |    2 +-
 .../providers/microsoft/winrm/CHANGELOG.rst        |   31 +-
 .../microsoft/winrm/PROVIDER_CHANGES_1.0.0.md      |   48 -
 airflow/providers/microsoft/winrm/README.md        |  141 --
 .../providers/mongo/CHANGELOG.rst                  |   31 +-
 airflow/providers/mongo/PROVIDER_CHANGES_1.0.0.md  |   46 -
 airflow/providers/mongo/README.md                  |  140 --
 .../providers/mysql/CHANGELOG.rst                  |   31 +-
 airflow/providers/mysql/PROVIDER_CHANGES_1.0.0.md  |   61 -
 airflow/providers/mysql/README.md                  |  194 ---
 .../providers/neo4j/CHANGELOG.rst                  |   34 +-
 .../providers/odbc/CHANGELOG.rst                   |   31 +-
 airflow/providers/odbc/PROVIDER_CHANGES_1.0.0.md   |   43 -
 airflow/providers/odbc/README.md                   |  123 --
 .../providers/openfaas/CHANGELOG.rst               |   31 +-
 .../providers/openfaas/PROVIDER_CHANGES_1.0.0.md   |   40 -
 airflow/providers/openfaas/README.md               |  113 --
 .../providers/opsgenie/CHANGELOG.rst               |   31 +-
 .../providers/opsgenie/PROVIDER_CHANGES_1.0.0.md   |   45 -
 airflow/providers/opsgenie/README.md               |  147 ---
 .../providers/oracle/CHANGELOG.rst                 |   31 +-
 airflow/providers/oracle/PROVIDER_CHANGES_1.0.0.md |   51 -
 airflow/providers/oracle/README.md                 |  157 ---
 .../providers/pagerduty/CHANGELOG.rst              |   31 +-
 .../providers/pagerduty/PROVIDER_CHANGES_1.0.0.md  |   40 -
 airflow/providers/pagerduty/README.md              |  120 --
 .../providers/papermill/CHANGELOG.rst              |   31 +-
 .../providers/papermill/PROVIDER_CHANGES_1.0.0.md  |   35 -
 airflow/providers/papermill/README.md              |  121 --
 .../providers/plexus/CHANGELOG.rst                 |   31 +-
 airflow/providers/plexus/PROVIDER_CHANGES_1.0.0.md |   26 -
 airflow/providers/plexus/README.md                 |  119 --
 .../providers/postgres/CHANGELOG.rst               |   31 +-
 .../providers/postgres/PROVIDER_CHANGES_1.0.0.md   |   54 -
 airflow/providers/postgres/README.md               |  163 ---
 .../providers/presto/CHANGELOG.rst                 |   31 +-
 airflow/providers/presto/PROVIDER_CHANGES_1.0.0.md |   48 -
 airflow/providers/presto/README.md                 |  140 --
 .../providers/qubole/CHANGELOG.rst                 |   31 +-
 airflow/providers/qubole/PROVIDER_CHANGES_1.0.0.md |   67 -
 airflow/providers/qubole/README.md                 |  178 ---
 .../providers/redis/CHANGELOG.rst                  |   31 +-
 airflow/providers/redis/PROVIDER_CHANGES_1.0.0.md  |   52 -
 airflow/providers/redis/README.md                  |  159 ---
 .../providers/salesforce/CHANGELOG.rst             |   31 +-
 .../providers/salesforce/PROVIDER_CHANGES_1.0.0.md |   58 -
 airflow/providers/salesforce/README.md             |  171 ---
 .../providers/samba/CHANGELOG.rst                  |   31 +-
 airflow/providers/samba/PROVIDER_CHANGES_1.0.0.md  |   43 -
 airflow/providers/samba/README.md                  |  123 --
 .../providers/segment/CHANGELOG.rst                |   31 +-
 .../providers/segment/PROVIDER_CHANGES_1.0.0.md    |   47 -
 airflow/providers/segment/README.md                |  140 --
 .../providers/sendgrid/CHANGELOG.rst               |   31 +-
 .../providers/sendgrid/PROVIDER_CHANGES_1.0.0.md   |   16 -
 airflow/providers/sendgrid/README.md               |   83 --
 .../providers/sftp/CHANGELOG.rst                   |   31 +-
 airflow/providers/sftp/PROVIDER_CHANGES_1.0.0.md   |   54 -
 airflow/providers/sftp/README.md                   |  178 ---
 .../providers/singularity/CHANGELOG.rst            |   31 +-
 .../singularity/PROVIDER_CHANGES_1.0.0.md          |   44 -
 airflow/providers/singularity/README.md            |  124 --
 .../providers/slack/CHANGELOG.rst                  |   31 +-
 airflow/providers/slack/PROVIDER_CHANGES_1.0.0.md  |   60 -
 .../BACKPORT_PROVIDER_CHANGES_2020.11.23.md        |    2 +-
 .../snowflake/BACKPORT_PROVIDER_README.md          |    2 +-
 .../providers/snowflake/CHANGELOG.rst              |   31 +-
 .../providers/snowflake/PROVIDER_CHANGES_1.0.0.md  |   65 -
 airflow/providers/snowflake/README.md              |  192 ---
 .../providers/sqlite/CHANGELOG.rst                 |   31 +-
 airflow/providers/sqlite/PROVIDER_CHANGES_1.0.0.md |   47 -
 airflow/providers/sqlite/README.md                 |  133 --
 .../providers/ssh/CHANGELOG.rst                    |   31 +-
 airflow/providers/ssh/PROVIDER_CHANGES_1.0.0.md    |   54 -
 airflow/providers/ssh/README.md                    |  149 ---
 .../providers/telegram/CHANGELOG.rst               |   31 +-
 .../providers/telegram/PROVIDER_CHANGES_1.0.0.md   |    8 -
 airflow/providers/telegram/README.md               |  101 --
 .../providers/vertica/CHANGELOG.rst                |   31 +-
 .../providers/vertica/PROVIDER_CHANGES_1.0.0.md    |   45 -
 airflow/providers/vertica/README.md                |  138 --
 .../providers/yandex/CHANGELOG.rst                 |   31 +-
 airflow/providers/yandex/PROVIDER_CHANGES_1.0.0.md |   53 -
 airflow/providers/yandex/README.md                 |  152 ---
 .../providers/zendesk/CHANGELOG.rst                |   31 +-
 .../providers/zendesk/PROVIDER_CHANGES_1.0.0.md    |   42 -
 airflow/providers/zendesk/README.md                |  122 --
 breeze                                             |   98 +-
 breeze-complete                                    |    6 +-
 dev/PROVIDER_PACKAGE_DETAILS.md                    |   14 +-
 ...> README_RELEASE_BACKPORT_PROVIDER_PACKAGES.md} |  287 +---
 dev/README_RELEASE_PROVIDER_PACKAGES.md            |  732 +----------
 dev/import_all_classes.py                          |   23 +-
 .../BACKPORT_PROVIDER_CHANGES_TEMPLATE.md.jinja2   |    7 +
 .../BACKPORT_PROVIDER_CLASSES_TEMPLATE.md.jinja2   |    7 +
 .../BACKPORT_PROVIDER_README_TEMPLATE.md.jinja2    |   11 +-
 ...g.jinja2 => BACKPORT_SETUP_TEMPLATE.cfg.jinja2} |   29 +-
 dev/provider_packages/MANIFEST_TEMPLATE.in.jinja2  |    7 +
 .../PROVIDER_CHANGES_TEMPLATE.md.jinja2            |   22 -
 .../PROVIDER_CLASSES_TEMPLATE.md.jinja2            |   46 -
 .../PROVIDER_COMMITS_TEMPLATE.rst.jinja2           |   55 +
 .../PROVIDER_INDEX_TEMPLATE.rst.jinja2             |   87 ++
 .../PROVIDER_README_TEMPLATE.md.jinja2             |   38 +-
 dev/provider_packages/README.md                    |  411 ++++--
 .../{README.md => README_BACKPORT_PACKAGES.md}     |  107 +-
 dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2    |   25 +-
 dev/provider_packages/SETUP_TEMPLATE.py.jinja2     |   11 +-
 ...ackages.py => copy_provider_package_sources.py} |   73 +-
 .../enter_breeze_provider_package_tests.sh         |    2 +-
 .../get_provider_info_TEMPLATE.py.jinja2           |    8 +
 dev/provider_packages/prepare_provider_packages.py | 1386 +++++++++++++-------
 docs/apache-airflow-providers-google/commits.rst   |  479 +++++++
 docs/apache-airflow-providers-google/index.rst     |  209 +++
 docs/apache-airflow-providers-neo4j/commits.rst    |   41 +
 .../ci/build_airflow/ci_build_airflow_package.sh   |    2 -
 .../ci/docker-compose/local-all-sources.yml        |   37 +-
 scripts/ci/libraries/_build_airflow_packages.sh    |    2 +-
 scripts/ci/libraries/_build_images.sh              |    4 +-
 scripts/ci/libraries/_initialization.sh            |   22 +-
 scripts/ci/libraries/_runs.sh                      |    4 +-
 scripts/ci/libraries/_start_end.sh                 |   32 +-
 .../ci_install_and_test_provider_packages.sh       |    3 +-
 ...mes.sh => ci_prepare_provider_documentation.sh} |    2 +-
 scripts/ci/testing/ci_run_airflow_testing.sh       |    5 +-
 scripts/in_container/_in_container_utils.sh        |  117 +-
 .../run_prepare_provider_documentation.sh          |  124 ++
 .../in_container/run_prepare_provider_packages.sh  |  211 ++-
 .../in_container/run_prepare_provider_readme.sh    |   56 -
 257 files changed, 3316 insertions(+), 17467 deletions(-)

diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index dd85359..e5f8b41 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -22,7 +22,7 @@ on:  # yamllint disable-line rule:truthy
     workflows: ["CI Build"]
     types: ['requested']
 env:
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -405,6 +405,8 @@ jobs:
       GITHUB_REGISTRY_PULL_IMAGE_TAG: ${{ github.event.workflow_run.id }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       DOCKER_CACHE: ${{ needs.cancel-workflow-runs.outputs.cacheDirective }}
+      VERSION_SUFFIX_FOR_PYPI: "dev"
+      VERSION_SUFFIX_FOR_SVN: "dev"
     steps:
       - name: >
           Checkout [${{ needs.cancel-workflow-runs.outputs.sourceEvent }}]
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f7abd6c..54f102a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -27,7 +27,7 @@ on:  # yamllint disable-line rule:truthy
 
 env:
 
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -275,7 +275,7 @@ jobs:
     needs: [build-info, ci-images]
     env:
       SKIP: "pylint,identity"
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.basic-checks-only == 'false'
@@ -327,7 +327,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     needs: [build-info]
     env:
       SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity"
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'true'
     steps:
@@ -380,7 +380,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     env:
       # We want to make sure we have latest sources as only in_container scripts are added
       # to the image but we want to static-check all of them
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
@@ -470,6 +470,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       BACKPORT_PACKAGES: "true"
       VERSION_SUFFIX_FOR_PYPI: "dev"
+      VERSION_SUFFIX_FOR_SVN: "dev"
       PACKAGE_FORMAT: ${{ matrix.package-format }}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.image-build == 'true'
@@ -487,8 +488,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         if: "!contains(needs.build-info.outputs.runsOn, 'self-hosted')"
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
+      - name: "Prepare provider documentation"
+        run: ./scripts/ci/provider_packages/ci_prepare_provider_documentation.sh
       - name: "Prepare provider packages: ${{ matrix.package-format }}"
         run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
       - name: "Install and test provider packages and airflow via ${{ matrix.package-format }} files"
@@ -502,7 +503,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           name: airflow-backport-packages
           path: "./dist/apache*"
           retention-days: 7
-      - name: "Upload readme artifacts"
+      - name: "Upload documentation artifacts"
         uses: actions/upload-artifact@v2
         if: always() && matrix.package-format == 'wheel'
         with:
@@ -520,6 +521,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: "dev"
+      VERSION_SUFFIX_FOR_SVN: "dev"
       PACKAGE_FORMAT: ${{ matrix.package-format }}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     strategy:
@@ -540,8 +542,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         if: "!contains(needs.build-info.outputs.runsOn, 'self-hosted')"
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
+      - name: "Prepare provider documentation"
+        run: ./scripts/ci/provider_packages/ci_prepare_provider_documentation.sh
       - name: "Prepare provider packages: ${{ matrix.package-format }}"
         run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
       - name: "Prepare airflow packages: ${{ matrix.package-format }}"
@@ -555,13 +557,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           name: airflow-provider-packages
           path: "./dist/apache-*"
           retention-days: 7
-      - name: "Upload readme artifacts"
-        uses: actions/upload-artifact@v2
-        if: always() && matrix.package-format == 'wheel'
-        with:
-          name: airflow-provider-readmes
-          path: "./files/airflow-readme-*"
-          retention-days: 7
 
   test-provider-packages-released-airflow:
     timeout-minutes: 30
@@ -573,6 +568,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: "dev"
+      VERSION_SUFFIX_FOR_SVN: "dev"
       PACKAGE_FORMAT: ${{ matrix.package-format }}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     strategy:
@@ -593,8 +589,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         if: "!contains(needs.build-info.outputs.runsOn, 'self-hosted')"
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
+      - name: "Prepare provider documentation"
+        run: ./scripts/ci/provider_packages/ci_prepare_provider_documentation.sh
       - name: "Prepare provider packages: ${{ matrix.package-format }}"
         run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
       - name: "Install and test provider packages and airflow via ${{ matrix.package-format }} files"
@@ -606,7 +602,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     env:
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       RUN_TESTS: true
       TEST_TYPES: "Helm"
       BACKEND: "sqlite"
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index 0d42088..00663cb 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -23,7 +23,7 @@ on:  # yamllint disable-line rule:truthy
     - cron: '12 */6 * * *'
 
 env:
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
diff --git a/BREEZE.rst b/BREEZE.rst
index 730d6a1..1958697 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -558,20 +558,19 @@ The below example builds provider packages in the wheel format.
      ./breeze prepare-provider-packages
 
 If you run this command without packages, you will prepare all packages, you can however specify
-providers that you would like to build. By default only ``wheel`` packages are prepared,
-but you can change it providing optional --package-format flag.
-
+providers that you would like to build. By default ``both`` types of packages are prepared
+(``wheel`` and ``sdist``), but you can change this by providing the optional --package-format flag.
 
 .. code-block:: bash
 
-     ./breeze prepare-provider-packages --package-format=both google amazon
+     ./breeze prepare-provider-packages google amazon
 
 You can also prepare backport provider packages, if you specify ``--backport`` flag. You can read more
 about backport packages in `dev <dev/README.md>`_
 
 .. code-block:: bash
 
-     ./breeze prepare-provider-packages --backports --package-format=both google amazon
+     ./breeze prepare-provider-packages --backports google amazon
 
 You can see all providers available by running this command:
 
@@ -588,11 +587,12 @@ You can also prepare airflow packages using breeze:
 
 This prepares airflow .whl package in the dist folder.
 
-Again, you can specify optional ``--package-format`` flag to build airflow packages.
+Again, you can specify the optional ``--package-format`` flag to build selected formats of airflow
+packages; the default is to build ``both`` types of packages: ``sdist`` and ``wheel``.
 
 .. code-block:: bash
 
-     ./breeze prepare-airflow-packages --package-format=bot
+     ./breeze prepare-airflow-packages --package-format=wheel
 
 
 Building Production images
@@ -1167,12 +1167,12 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   Commands with arguments:
 
-    docker-compose                <ARG>      Executes specified docker-compose command
-    kind-cluster                  <ARG>      Manages KinD cluster on the host
-    prepare-provider-readme       <ARG>      Prepares provider packages readme files
-    prepare-provider-packages     <ARG>      Prepares provider packages
-    static-check                  <ARG>      Performs selected static check for changed files
-    tests                         <ARG>      Runs selected tests in the container
+    docker-compose                     <ARG>      Executes specified docker-compose command
+    kind-cluster                       <ARG>      Manages KinD cluster on the host
+    prepare-provider-documentation     <ARG>      Prepares provider packages documentation
+    prepare-provider-packages          <ARG>      Prepares provider packages
+    static-check                       <ARG>      Performs selected static check for changed files
+    tests                              <ARG>      Runs selected tests in the container
 
   Help commands:
 
@@ -1467,7 +1467,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
   --github-registry GITHUB_REGISTRY
-          Github registry used. GitHub has legacy Packages registry and Public Beta Container
+          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
           registry.
 
           Default: docker.pkg.github.com.
@@ -1661,7 +1661,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
   --github-registry GITHUB_REGISTRY
-          Github registry used. GitHub has legacy Packages registry and Public Beta Container
+          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
           registry.
 
           Default: docker.pkg.github.com.
@@ -1753,9 +1753,13 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 wheel,sdist,both
+                 both,sdist,wheel
+
+          Default: both
 
-          Default: wheel
+  --backports
+
+          Prepares backport providers rather than regular ones.
 
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
@@ -2103,26 +2107,31 @@ This is the current syntax for  `./breeze <./breeze>`_:
   ####################################################################################################
 
 
-  Detailed usage for command: prepare-provider-readme
+  Detailed usage for command: prepare-provider-documentation
+
 
+  breeze prepare-provider-documentation [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...]
 
-  breeze prepare-provider-readme [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...]
+        Prepares documentation files for provider packages.
 
-        Prepares README.md files for backport packages. You can provide (after --) optional version
-        in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for.
+        The command is optionally followed by the list of packages to generate readme for.
         If the first parameter is not formatted as a date, then today is regenerated.
         If no packages are specified, readme for all packages are generated.
         If no date is specified, current date + 3 days is used (allowing for PMC votes to pass).
 
+        You can also specify the --backports flag to prepare backport provider documentation; in this
+        case you can also optionally specify the CALVER version as the first parameter.
+
         Examples:
 
-        'breeze prepare-provider-readme' or
-        'breeze prepare-provider-readme 2020.05.10' or
-        'breeze prepare-provider-readme 2020.05.10 https google amazon'
+        'breeze prepare-provider-documentation' or
+        'breeze prepare-provider-documentation --version-suffix-for-pypi rc1' or
+        'breeze prepare-provider-documentation --backports 2020.05.10' or
+        'breeze prepare-provider-documentation --backports 2020.05.10 https google amazon'
 
         General form:
 
-        'breeze prepare-provider-readme YYYY.MM.DD <PACKAGE_ID> ...'
+        'breeze prepare-provider-documentation YYYY.MM.DD <PACKAGE_ID> ...'
 
         * YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date
           cannot be earlier than the already released version (the script will fail if it
@@ -2134,6 +2143,28 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   Flags:
 
+  -S, --version-suffix-for-pypi SUFFIX
+          Adds optional suffix to the version in the generated backport package. It can be used
+          to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
+
+  -N, --version-suffix-for-svn SUFFIX
+          Adds an optional suffix to the generated package names. It can be used to generate
+          rc1/rc2 ... versions of the packages to be uploaded to SVN.
+
+  --package-format PACKAGE_FORMAT
+
+          Chooses format of packages to prepare.
+
+          One of:
+
+                 both,sdist,wheel
+
+          Default: both
+
+  --backports
+
+          Prepares backport providers rather than regular ones.
+
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
           debugging - when you run breeze with --verbose flags you will be able to see the commands
@@ -2165,11 +2196,14 @@ This is the current syntax for  `./breeze <./breeze>`_:
         prepared there so make sure you run prepare-provider-packages first,
         and prepare-airflow-packages second.
 
+        You can also specify the --backports flag to prepare backport providers, or --package-format to
+        prepare one or both of the supported package formats.
+
         Examples:
 
         'breeze prepare-provider-packages' or
         'breeze prepare-provider-packages google' or
-        'breeze prepare-provider-packages --package-format both google' or
+        'breeze prepare-provider-packages --package-format wheel google' or
         'breeze prepare-provider-packages --version-suffix-for-svn rc1 http google amazon' or
         'breeze prepare-provider-packages --version-suffix-for-pypi rc1 http google amazon'
         'breeze prepare-provider-packages --version-suffix-for-pypi a1
@@ -2177,7 +2211,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
         General form:
 
-        'breeze prepare-provider-packages [--package-format PACKAGE_FORMAT] \
+        'breeze prepare-provider-packages [--backports] [--package-format PACKAGE_FORMAT] \
               [--version-suffix-for-svn|--version-suffix-for-pypi] <PACKAGE_ID> ...'
 
         * <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
@@ -2192,9 +2226,13 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 wheel,sdist,both
+                 both,sdist,wheel
+
+          Default: both
+
+  --backports
 
-          Default: wheel
+          Prepares backport providers rather than regular ones.
 
   -S, --version-suffix-for-pypi SUFFIX
           Adds optional suffix to the version in the generated backport package. It can be used
@@ -2667,7 +2705,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
   --github-registry GITHUB_REGISTRY
-          Github registry used. GitHub has legacy Packages registry and Public Beta Container
+          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
           registry.
 
           Default: docker.pkg.github.com.
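
Putting the renamed command and the new flags from the BREEZE.rst help text above together, a typical invocation sequence would look roughly as follows. This is a sketch based only on the flags and examples documented above; exact behaviour depends on the breeze version in use:

    # regular providers: documentation first, then both sdist and wheel packages (the new default)
    ./breeze prepare-provider-documentation --version-suffix-for-pypi rc1 google amazon
    ./breeze prepare-provider-packages --version-suffix-for-pypi rc1 google amazon

    # backport providers: CALVER date as the first argument, wheel format only
    ./breeze prepare-provider-documentation --backports 2020.05.10 google amazon
    ./breeze prepare-provider-packages --backports --package-format wheel google amazon
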
diff --git a/CI.rst b/CI.rst
index 20585f0..de5e17c 100644
--- a/CI.rst
+++ b/CI.rst
@@ -178,7 +178,7 @@ You can use those variables when you try to reproduce the build locally.
 +-----------------------------------------+----------------------------------------+-------------------------------------------------+
 |                                                           Mount variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
-| ``MOUNT_LOCAL_SOURCES``                 |     true    |    false    |    false   | Determines whether local sources are            |
+| ``MOUNT_SELECTED_LOCAL_SOURCES``        |     true    |    false    |    false   | Determines whether local sources are            |
 |                                         |             |             |            | mounted to inside the container. Useful for     |
 |                                         |             |             |            | local development, as changes you make          |
 |                                         |             |             |            | locally can be immediately tested in            |
@@ -189,6 +189,15 @@ You can use those variables when you try to reproduce the build locally.
 |                                         |             |             |            | directories) generated locally on the           |
 |                                         |             |             |            | host during development.                        |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
+| ``MOUNT_ALL_LOCAL_SOURCES``             |     false   |    false    |    false   | Determines whether all local sources are        |
+|                                         |             |             |            | mounted to inside the container. Useful for     |
+|                                         |             |             |            | local development when you need to access .git  |
+|                                         |             |             |            | folders and other folders excluded when         |
+|                                         |             |             |            | ``MOUNT_SELECTED_LOCAL_SOURCES`` is true.       |
+|                                         |             |             |            | You might need to manually delete egg-info      |
+|                                         |             |             |            | folder when you enter breeze and the folder was |
+|                                         |             |             |            | generated using different python versions.      |
++-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 |                                                           Force variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``FORCE_PULL_IMAGES``                   |    true     |    true     |    true    | Determines if images are force-pulled,          |
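
The new CI.rst table row documents ``MOUNT_ALL_LOCAL_SOURCES`` alongside the renamed ``MOUNT_SELECTED_LOCAL_SOURCES``. A minimal sketch of how the two modes might be selected when reproducing a build locally (assuming, as the surrounding CI.rst text states, that these variables are read from the environment; the exact interaction between the two flags is an assumption):

    # default local-development mode: mount only the selected source directories
    export MOUNT_SELECTED_LOCAL_SOURCES="true"
    export MOUNT_ALL_LOCAL_SOURCES="false"

    # alternative: mount all local sources, including .git and other normally excluded folders
    # (remember to remove stale *.egg-info folders generated with a different Python version)
    # export MOUNT_SELECTED_LOCAL_SOURCES="false"
    # export MOUNT_ALL_LOCAL_SOURCES="true"

    ./breeze
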
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 3d6ca50..38256ac 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -621,7 +621,7 @@ All Tests are inside ./tests directory.
       entrypoint_exec.sh*                         run_install_and_test_provider_packages.sh*
       _in_container_script_init.sh*               run_mypy.sh*
       prod/                                       run_prepare_provider_packages.sh*
-      refresh_pylint_todo.sh*                     run_prepare_provider_readme.sh*
+      refresh_pylint_todo.sh*                     run_prepare_provider_documentation.sh*
       run_ci_tests.sh*                            run_pylint.sh*
       run_clear_tmp.sh*                           run_system_tests.sh*
       run_docs_build.sh*                          run_tmux_welcome.sh*
@@ -836,7 +836,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r
       entrypoint_exec.sh*                         run_install_and_test_provider_packages.sh*
       _in_container_script_init.sh*               run_mypy.sh*
       prod/                                       run_prepare_provider_packages.sh*
-      refresh_pylint_todo.sh*                     run_prepare_provider_readme.sh*
+      refresh_pylint_todo.sh*                     run_prepare_provider_documentation.sh*
       run_ci_tests.sh*                            run_pylint.sh*
       run_clear_tmp.sh*                           run_system_tests.sh*
       run_docs_build.sh*                          run_tmux_welcome.sh*
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/README.md b/airflow/providers/README.md
deleted file mode 100644
index 3955f29..0000000
--- a/airflow/providers/README.md
+++ /dev/null
@@ -1,28 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-# Airflow Providers
-
-Providers are logical abstractions of submodules that can be used to interface with various tools and endpoints from your Airflow DAGs. Each provider is grouped by the relevant top-level service that a user might need to interact with and submodules for specific forms of interaction, including hooks, operators, sensors, and transfers, exist within each provider directory.
-
-## Using Providers
-
-As of Airflow 2.0, the provider packages contained in this subdirectory will be versioned and released independently of the core Airflow codebase. That means that, in order to use the submodules contained within these provider directories, a user will need to install the relevant provider python package into their Airflow environment. The relevant pip commands to install these providers and their submodules are documented in READMEs within each provider subdirectory.
-
-Note that this does not mean that **all** Airflow operators will be abstracted away into python packages- core Airflow hooks and operators that exist in `airflow/operators` and `airflow/hooks` will continue to be included in core Airflow releases and directly accessible within any Airflow environment.
diff --git a/airflow/providers/amazon/aws/ADDITIONAL_INFO.md b/airflow/providers/amazon/ADDITIONAL_INFO.md
similarity index 100%
rename from airflow/providers/amazon/aws/ADDITIONAL_INFO.md
rename to airflow/providers/amazon/ADDITIONAL_INFO.md
diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index dc4f802..41dd53d 100644
--- a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13  | Add extra error handling to S3 remote logging (#9908)                          |
diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2021.02.05.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2021.02.05.md
new file mode 100644
index 0000000..886e1a4
--- /dev/null
+++ b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2021.02.05.md
@@ -0,0 +1,41 @@
+
+
+### Release 2021.2.5
+
+| Commit                                                                                         | Committed   | Subject                                                                                 |
+|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------|
+| [66e82969d](https://github.com/apache/airflow/commit/66e82969dd0ad656618bda4719a545bbaeed5d10) | 2021-01-31  | `Implement provider versioning tools`                                                   |
+| [ecfdc60bb](https://github.com/apache/airflow/commit/ecfdc60bb607fe0d13fa7e315476c607813abab6) | 2021-01-29  | `Add bucket_name to template fileds in S3 operators (#13973)`                           |
+| [d0ab7f6d3](https://github.com/apache/airflow/commit/d0ab7f6d3a2976167f9c4fb309c502a4f866f983) | 2021-01-25  | `Add ExasolToS3Operator (#13847)`                                                       |
+| [6d55f329f](https://github.com/apache/airflow/commit/6d55f329f93c5cd1e94973194c0cd7caa65309e1) | 2021-01-25  | `AWS Glue Crawler Integration (#13072)`                                                 |
+| [f473ca713](https://github.com/apache/airflow/commit/f473ca7130f844bc59477674e641b42b80698bb7) | 2021-01-24  | `Replace &#39;google_cloud_storage_conn_id&#39; by &#39;gcp_conn_id&#39; when using &#39;GCSHook&#39; (#13851)` |
+| [a9ac2b040](https://github.com/apache/airflow/commit/a9ac2b040b64de1aa5d9c2b9def33334e36a8d22) | 2021-01-23  | `Switch to f-strings using flynt. (#13732)`                                             |
+| [3fd5ef355](https://github.com/apache/airflow/commit/3fd5ef355556cf0ad7896bb570bbe4b2eabbf46e) | 2021-01-21  | `Add missing logos for integrations (#13717)`                                           |
+| [29730d720](https://github.com/apache/airflow/commit/29730d720066a4c16d524e905de8cdf07e8cd129) | 2021-01-20  | `Add acl_policy to S3CopyObjectOperator (#13773)`                                       |
+| [c065d3218](https://github.com/apache/airflow/commit/c065d32189bfee80ab938d96ad74f6492e9c9b24) | 2021-01-19  | `AllowDiskUse parameter and docs in MongotoS3Operator (#12033)`                         |
+| [ab5fe56ac](https://github.com/apache/airflow/commit/ab5fe56ac4bda0d3fcdcbf58ed2632255b7ac713) | 2021-01-16  | `Fix bug in GCSToS3Operator (#13718)`                                                   |
+| [04d278f93](https://github.com/apache/airflow/commit/04d278f93ffafb40fb6e95b41ecfa5f5cba5ef98) | 2021-01-13  | `Add S3ToFTPOperator (#11747)`                                                          |
+| [8d42d9ed6](https://github.com/apache/airflow/commit/8d42d9ed69b03b372c6bc01309ef22e01b8db55f) | 2021-01-11  | `add xcom push for ECSOperator (#12096)`                                                |
+| [308f1d066](https://github.com/apache/airflow/commit/308f1d06668ad427fd2483077d8e60f55ee617e6) | 2021-01-07  | `[AIRFLOW-3723] Add Gzip capability to mongo_to_S3 operator (#13187)`                   |
+| [f69405fb0](https://github.com/apache/airflow/commit/f69405fb0b7c236968c730e1ad31a60eea2338c4) | 2021-01-07  | `Fix S3KeysUnchangedSensor so that template_fields work (#13490)`                       |
+| [4e479e1e1](https://github.com/apache/airflow/commit/4e479e1e1b8eea71df48f5cc08a7dd15929ba177) | 2021-01-06  | `Add S3KeySizeSensor (#13049)`                                                          |
+| [f7a1334ab](https://github.com/apache/airflow/commit/f7a1334abe4417409498daad52c97d3f0eb95137) | 2021-01-02  | `Add &#39;mongo_collection&#39; to template_fields in MongoToS3Operator (#13361)`               |
+| [bd74eb0ca](https://github.com/apache/airflow/commit/bd74eb0ca0bb5f81cd98e2c151257a404d4a55a5) | 2020-12-31  | `Allow Tags on AWS Batch Job Submission (#13396)`                                       |
+| [295d66f91](https://github.com/apache/airflow/commit/295d66f91446a69610576d040ba687b38f1c5d0a) | 2020-12-30  | `Fix Grammar in PIP warning (#13380)`                                                   |
+| [625576a3a](https://github.com/apache/airflow/commit/625576a3af470cddad250735b74ba11e4880de0a) | 2020-12-18  | `Fix spelling (#13135)`                                                                 |
+| [6cf76d7ac](https://github.com/apache/airflow/commit/6cf76d7ac01270930de7f105fb26428763ee1d4e) | 2020-12-18  | `Fix typo in pip upgrade command :( (#13148)`                                           |
+| [5090fb0c8](https://github.com/apache/airflow/commit/5090fb0c8967d2d8719c6f4a468f2151395b5444) | 2020-12-15  | `Add script to generate integrations.json (#13073)`                                     |
+| [32971a1a2](https://github.com/apache/airflow/commit/32971a1a2de1db0b4f7442ed26facdf8d3b7a36f) | 2020-12-09  | `Updates providers versions to 1.0.0 (#12955)`                                          |
+| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08  | `Move dummy_operator.py to dummy.py (#11178) (#11293)`                                  |
+| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | `Rename remaing modules to match AIP-21 (#12917)`                                       |
+| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | `Add support for dynamic connection form fields per provider (#12558)`                  |
+| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | `Move operator guides to provider documentation packages (#12681)`                      |
+| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | `Don&#39;t use time.time() or timezone.utcnow() for duration calculations (#12353)`         |
+| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28  | `Move connection guides to provider documentation packages (#12653)`                    |
+| [663259d4b](https://github.com/apache/airflow/commit/663259d4b541ab10ce55fec4d2460e23917062c2) | 2020-11-25  | `Fix AWS DataSync tests failing (#11020)`                                               |
+| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24  | `Add check for duplicates in provider.yaml files (#12578)`                              |
+| [ed09915a0](https://github.com/apache/airflow/commit/ed09915a02b9b99e60689e647452addaab1688fc) | 2020-11-23  | `[AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389)`      |
+| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | `Fix Python Docstring parameters (#12513)`                                              |
+| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | `Separate out documentation building per provider  (#12444)`                            |
+| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | `Update provider READMEs for 1.0.0b2 batch release (#12449)`                            |
+| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | `Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)`           |
diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_README.md b/airflow/providers/amazon/BACKPORT_PROVIDER_README.md
index 1d0f309..1ec6e6f 100644
--- a/airflow/providers/amazon/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/amazon/BACKPORT_PROVIDER_README.md
@@ -20,7 +20,7 @@
 
 # Package apache-airflow-backport-providers-amazon
 
-Release: 2020.11.23
+Release: 2021.2.5
 
 **Table of contents**
 
@@ -60,6 +60,14 @@ While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade pyth
 want to use this backport package.
 
 
+## Change in import paths
+
+If you are upgrading from 2020.10.5 note the following changes in import paths
+
+| Old path                                                        | New path                                                    |
+| --------------------------------------------------------------- | ----------------------------------------------------------- |
+| airflow.providers.amazon.aws.hooks.aws_dynamodb.AwsDynamoDBHook | airflow.providers.amazon.aws.hooks.dynamodb.AwsDynamoDBHook |
+
 
 ## Installation
 
@@ -70,8 +78,9 @@ You can install this package on top of an existing airflow 1.10.* installation v
 
 | PIP package   | Version required   |
 |:--------------|:-------------------|
-| boto3         | &gt;=1.12.0,&lt;2.0.0    |
-| watchtower    | ~=0.7.3            |
+| `boto3`       | `>=1.15.0,<1.16.0` |
+| `botocore`    | `>=1.18.0,<1.19.0` |
+| `watchtower`  | `~=0.7.3`          |
 
 ## Cross provider package dependencies
 
@@ -84,15 +93,17 @@ You can install such cross-provider dependencies when installing from PyPI. For
 pip install apache-airflow-backport-providers-amazon[apache.hive]
 ```
 
-| Dependent package                                                                                                            | Extra       |
-|:-----------------------------------------------------------------------------------------------------------------------------|:------------|
-| [apache-airflow-backport-providers-apache-hive](https://github.com/apache/airflow/tree/master/airflow/providers/apache/hive) | apache.hive |
-| [apache-airflow-backport-providers-google](https://github.com/apache/airflow/tree/master/airflow/providers/google)           | google      |
-| [apache-airflow-backport-providers-imap](https://github.com/apache/airflow/tree/master/airflow/providers/imap)               | imap        |
-| [apache-airflow-backport-providers-mongo](https://github.com/apache/airflow/tree/master/airflow/providers/mongo)             | mongo       |
-| [apache-airflow-backport-providers-mysql](https://github.com/apache/airflow/tree/master/airflow/providers/mysql)             | mysql       |
-| [apache-airflow-backport-providers-postgres](https://github.com/apache/airflow/tree/master/airflow/providers/postgres)       | postgres    |
-| [apache-airflow-backport-providers-ssh](https://github.com/apache/airflow/tree/master/airflow/providers/ssh)                 | ssh         |
+| Dependent package                                                                                                            | Extra         |
+|:-----------------------------------------------------------------------------------------------------------------------------|:--------------|
+| [apache-airflow-backport-providers-apache-hive](https://github.com/apache/airflow/tree/master/airflow/providers/apache/hive) | `apache.hive` |
+| [apache-airflow-backport-providers-exasol](https://github.com/apache/airflow/tree/master/airflow/providers/exasol)           | `exasol`      |
+| [apache-airflow-backport-providers-ftp](https://github.com/apache/airflow/tree/master/airflow/providers/ftp)                 | `ftp`         |
+| [apache-airflow-backport-providers-google](https://github.com/apache/airflow/tree/master/airflow/providers/google)           | `google`      |
+| [apache-airflow-backport-providers-imap](https://github.com/apache/airflow/tree/master/airflow/providers/imap)               | `imap`        |
+| [apache-airflow-backport-providers-mongo](https://github.com/apache/airflow/tree/master/airflow/providers/mongo)             | `mongo`       |
+| [apache-airflow-backport-providers-mysql](https://github.com/apache/airflow/tree/master/airflow/providers/mysql)             | `mysql`       |
+| [apache-airflow-backport-providers-postgres](https://github.com/apache/airflow/tree/master/airflow/providers/postgres)       | `postgres`    |
+| [apache-airflow-backport-providers-ssh](https://github.com/apache/airflow/tree/master/airflow/providers/ssh)                 | `ssh`         |
 
 # Provider classes summary
 
@@ -116,6 +127,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 | [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py)                                               |
 | [aws.operators.glacier.GlacierCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glacier.py)                                                                     |
 | [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py)                                                                                 |
+| [aws.operators.glue_crawler.AwsGlueCrawlerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue_crawler.py)                                                             |
 | [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                                                                   |
 | [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                                                                   |
 | [aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py)                                                  |
@@ -155,8 +167,10 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | New Airflow 2.0 transfers: `airflow.providers.amazon` package                                                                                               |
 |:------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [aws.transfers.exasol_to_s3.ExasolToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/exasol_to_s3.py)       |
 | [aws.transfers.glacier_to_gcs.GlacierToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py) |
 | [aws.transfers.mysql_to_s3.MySQLToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mysql_to_s3.py)          |
+| [aws.transfers.s3_to_ftp.S3ToFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_ftp.py)                |
 
 
 ### Moved transfer operators
@@ -187,7 +201,9 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 | [aws.sensors.ec2_instance_state.EC2InstanceStateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/ec2_instance_state.py)                |
 | [aws.sensors.glacier.GlacierJobOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glacier.py)                                   |
 | [aws.sensors.glue.AwsGlueJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue.py)                                                  |
+| [aws.sensors.glue_crawler.AwsGlueCrawlerSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue_crawler.py)                              |
 | [aws.sensors.redshift.AwsRedshiftClusterSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/redshift.py)                                  |
+| [aws.sensors.s3_key.S3KeySizeSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_key.py)                                               |
 | [aws.sensors.s3_keys_unchanged.S3KeysUnchangedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py)                   |
 | [aws.sensors.sagemaker_training.SageMakerTrainingSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_training.py)               |
 | [aws.sensors.step_function_execution.StepFunctionExecutionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/step_function_execution.py) |
@@ -225,6 +241,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 | [aws.hooks.elasticache_replication_group.ElastiCacheReplicationGroupHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py) |
 | [aws.hooks.glacier.GlacierHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glacier.py)                                                                 |
 | [aws.hooks.glue.AwsGlueJobHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue.py)                                                                    |
+| [aws.hooks.glue_crawler.AwsGlueCrawlerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue_crawler.py)                                                |
 | [aws.hooks.kinesis.AwsFirehoseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/kinesis.py)                                                             |
 | [aws.hooks.redshift.RedshiftHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/redshift.py)                                                              |
 | [aws.hooks.secrets_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/secrets_manager.py)                                          |
@@ -269,7 +286,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13  | Add extra error handling to S3 remote logging (#9908)                          |
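
The README changes above bump the amazon backport release to 2021.2.5, tighten the boto3/botocore pins and add new cross-provider extras (exasol, ftp). Assuming the release is published to PyPI under that version string, installing it on an Airflow 1.10.* environment would look roughly like:

    # backport package with one of the newly listed cross-provider extras
    pip install "apache-airflow-backport-providers-amazon[ftp]==2021.2.5"

    # AWS SDK versions pinned as in the dependency table above
    pip install "boto3>=1.15.0,<1.16.0" "botocore>=1.18.0,<1.19.0"
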
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/amazon/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/amazon/CHANGELOG.rst
index 140d473..09a64e2 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/amazon/CHANGELOG.rst
@@ -15,32 +15,12 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
 
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 9716b5a..0000000
--- a/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,217 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08  | Move dummy_operator.py to dummy.py (#11178) (#11293)                                                                                                               |
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don&#39;t use time.time() or timezone.utcnow() for duration calculations (#12353)                                                                                      |
-| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28  | Move connection guides to provider documentation packages (#12653)                                                                                                 |
-| [663259d4b](https://github.com/apache/airflow/commit/663259d4b541ab10ce55fec4d2460e23917062c2) | 2020-11-25  | Fix AWS DataSync tests failing (#11020)                                                                                                                            |
-| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24  | Add check for duplicates in provider.yaml files (#12578)                                                                                                           |
-| [ed09915a0](https://github.com/apache/airflow/commit/ed09915a02b9b99e60689e647452addaab1688fc) | 2020-11-23  | [AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389)                                                                                   |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13  | Add extra error handling to S3 remote logging (#9908)                                                                                                              |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08  | Add authentication to AWS with Google credentials (#12079)                                                                                                         |
-| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07  | In AWS Secrets backend, a lookup is optional (#12143)                                                                                                              |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06  | Simplify string expressions (#12123)                                                                                                                               |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02  | Docstring fix for S3DeleteBucketOperator (#12049)                                                                                                                  |
-| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29  | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844)                                                                                            |
-| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28  | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246)                                                                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26  | Fix spellings (#11825)                                                                                                                                             |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24  | Fix spelling (#11821)                                                                                                                                              |
-| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24  | Remove redundant builtins imports (#11809)                                                                                                                         |
-| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24  | Fix spelling and grammar (#11814)                                                                                                                                  |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23  | Add reattach flag to ECSOperator (#10643)                                                                                                                          |
-| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 2020-10-22  | Add type hints to  aws provider (#11531)                                                                                                                           |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19  | Fixes MySQLToS3 float to int conversion (#10437)                                                                                                                   |
-| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16  | Add type annotations for AWS operators and hooks (#11434)                                                                                                          |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12  | added type hints for aws cloud formation (#11470)                                                                                                                  |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                                                                                   |
-| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11  | Change prefix of AwsDynamoDB hook module (#11209)                                                                                                                  |
-| [42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11  | Update MySQLToS3Operator's s3_bucket to template_fields (#10778)                                                                                                       |
-| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09  | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701)                                                                           |
-| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06  | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829)                                                                                                      |
-| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06  | Strict type check for all hooks in amazon (#11250)                                                                                                                 |
-| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03  | Add s3 key to template fields for s3/redshift transfer operators (#10890)                                                                                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30  | Add amazon glacier to GCS transfer operator (#10947)                                                                                                               |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21  | Add D204 pydocstyle check (#11031)                                                                                                                                 |
-| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19  | Get Airflow configs with sensitive data from AWS Systems Manager (#11023)                                                                                          |
-| [2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19  | Add typing to amazon provider EMR (#10910)                                                                                                                         |
-| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19  | Support extra_args in S3Hook and GCSToS3Operator (#11001)                                                                                                          |
-| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14  | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655)                                                                                                              |
-| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14  | Fix Failing static tests on Master (#10927)                                                                                                                        |
-| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14  | Add more type annotations to AWS hooks (#10671)                                                                                                                    |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08  | Always return a list from S3Hook list methods (#10774)                                                                                                             |
-| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01  | Add placement_strategy option (#9444)                                                                                                                              |
-| [e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31  | fix type hints for s3 hook read_key method (#10653)                                                                                                                |
-| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29  | Update Google Cloud branding (#10642)                                                                                                                              |
-| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28  | Removed bad characters from AWS operator (#10590)                                                                                                                  |
-| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26  | Improve Docstring for AWS Athena Hook/Operator (#10580)                                                                                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                |
-| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24  | Implement impersonation in google operators (#10052)                                                                                                               |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22  | Fixes S3ToRedshift COPY query (#10436)                                                                                                                             |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                                                                                               |
-| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21  | Amazon SES Hook (#10391)                                                                                                                                           |
-| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17  | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022)                                                                                           |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                                                                                                               |
-| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11  | Add type annotations to AwsGlueJobHook, RedshiftHook modules (#10286)                                                                                              |
-| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10  | Revert "Add Amazon SES hook (#10004)" (#10276)                                                                                                                         |
-| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10  | Add Amazon SES hook (#10004)                                                                                                                                       |
-| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06  | Add type annotations to S3 hook module (#10164)                                                                                                                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05  | Add correct signatures for operators in amazon provider package (#10167)                                                                                           |
-| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03  | Improve Typing coverage of amazon/aws/athena (#10025)                                                                                                              |
-| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03  | Add S3KeysUnchangedSensor (#9817)                                                                                                                                  |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01  | Fix typo in Athena sensor retries (#10079)                                                                                                                         |
-| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29  | Adding new SageMaker operator for ProcessingJobs (#9594)                                                                                                           |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25  | Stop using start_date in default_args in example_dags (#9982)                                                                                                      |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20  | Refactor AwsBaseHook._get_credentials (#9878)                                                                                                                      |
-| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16  | Fix S3FileTransformOperator to support S3 Select transformation only (#8936)                                                                                       |
-| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15  | Add option to delete by prefix to S3DeleteObjectsOperator (#9350)                                                                                                  |
-| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13  | Keep functions signatures in decorators (#9786)                                                                                                                    |
-| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08  | Get Airflow configs with sensitive data from Secret Backends (#9645)                                                                                               |
-| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08  | Allow AWSAthenaHook to get more than 1000/first page of results (#6075)                                                                                            |
-| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08  | Add AWS StepFunctions integrations to the aws provider (#8749)                                                                                                     |
-| [ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08  | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator                                                                                                   |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                                                                                                      |
-| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02  | Move S3TaskHandler to the AWS provider package (#9602)                                                                                                             |
-| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29  | Remove almost all references to airflow.contrib (#9559)                                                                                                            |
-| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26  | Remove kwargs from Super calls in AWS Secrets Backends (#9523)                                                                                                     |
-| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25  | Use literal syntax instead of function calls to create data structure (#9516)                                                                                      |
-| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22  | Add AWS ECS system test (#8888)                                                                                                                                    |
-| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21  | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460)                                                                                                       |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                              |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19  | Move MySqlToS3Operator to transfers (#9400)                                                                                                                        |
-| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19  | Add MySqlToS3Operator (#9054)                                                                                                                                      |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                                 |
-| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16  | AWSBatchOperator <> ClientHook relation changed to composition (#9306)                                                                                                 |
-| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15  | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211)                                                                                              |
-| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14  | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214)                                                                                                   |
-| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10  | Add S3ToRedshift example dag and system test (#8877)                                                                                                               |
-| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
-| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29  | Add Delete/Create S3 bucket operators (#8895)                                                                                                                      |
-| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28  | Add script_args for S3FileTransformOperator (#9019)                                                                                                                |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
-| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23  | Old json boto compat removed from dynamodb_to_s3 operator (#8987)                                                                                                  |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16  | Speed up TestAwsLambdaHook by not actually running a function (#8882)                                                                                              |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15  | Add EMR operators howto docs (#8863)                                                                                                                               |
-| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13  | Add AWS EMR System tests (#8618)                                                                                                                                   |
-| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13  | Correctly pass sleep time from AWSAthenaOperator down to the hook. (#8845)                                                                                         |
-| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12  | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007)                                                                                                             |
-| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11  | Add option to propagate tags in ECSOperator (#8811)                                                                                                                |
-| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11  | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731)                                                                                                        |
-| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10  | Allow passing backend_kwargs to AWS SSM client (#8802)                                                                                                             |
-| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10  | Add imap_attachment_to_s3 example dag and system test (#8669)                                                                                                      |
-| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07  | Add google_api_to_s3_transfer example dags and system tests (#8581)                                                                                                |
-| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01  | Improve template capabilities of EMR job and step operators (#8572)                                                                                                |
-| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28  | fix: aws hook should work without conn id (#8534)                                                                                                                  |
-| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27  | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571)                                                                                                             |
-| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26  | Add example DAG for ECSOperator (#8452)                                                                                                                            |
-| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24  | [AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441)                                                                                                 |
-| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19  | Remove unrelated EC2 references in ECSOperator (#8451)                                                                                                             |
-| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13  | Make launch_type parameter optional (#8248)                                                                                                                        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08  | Add support for AWS Secrets Manager as Secrets Backend (#8186)                                                                                                     |
-| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04  | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541)                                                                                                         |
-| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31  | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011)                                                                                        |
-| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28  | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945)                                                                                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                                                                                                                 |
-| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28  | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891)                                                                                                 |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                                                                                                      |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                                                                                                     |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827)                                                                                |
-| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19  | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753)                                                                                      |
-| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17  | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745)                                                                                                         |
-| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16  | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733)                                                                                         |
-| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14  | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376)                                                                                                 |
-| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13  | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619)                                                                                                             |
-| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12  | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598)                                                                                                   |
-| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10  | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680)                                                                                                             |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                                                                                                   |
-| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01  | [AIRFLOW-6962] Fix compeleted to completed (#7600)                                                                                                                 |
-| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29  | [AIRFLOW-5908] Add download_file to S3 Hook (#6577)                                                                                                                |
-| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26  | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443)                                                                                            |
-| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25  | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574)                                                                                           |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24  | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451)                                                                                      |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21  | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474)                                                                                                 |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12  | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375)                                                                                                            |
-| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10  | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394)                                                                                                   |
-| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09  | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386)                                                                                                        |
-| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04  | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347)                                                                                                |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03  | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339)                                                                                                                |
-| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03  | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341)                                                                                                |
-| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03  | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342)                                                                                                                     |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02  | [AIRFLOW-4364] Make all code in airflow/providers/amazon pylint compatible (#7336)                                                                                 |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                                                                                                     |
-| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02  | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824)                                                                                               |
-| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02  | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288)                                                                                              |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                                                                                       |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28  | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274)                                                                                            |
-| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28  | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213)                                                                                                      |
-| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28  | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270)                                                                                               |
-| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28  | [AIRFLOW-6655] Move AWS classes to providers (#7271)                                                                                                               |
-| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18  | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146)                                                                                                       |
-| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17  | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178)                                                                                            |
-| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15  | [AIRFLOW-6570] Add dag tag for all example dag (#7176)                                                                                                             |
-| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08  | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811)                                                                                                       |
-| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28  | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871)                                                                                                       |
-| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24  | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888)                                                                                                   |
-| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18  | [AIRFLOW-6038] AWS DataSync reworked (#6773)                                                                                                                       |
-| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17  | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764)                                                                                                 |
-| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17  | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. (#6678)                                                                               |
-| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09  | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686)                                                                                |
-| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20  | [AIRFLOW-6021] Replace list literal with list constructor (#6617)                                                                                                  |
-| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19  | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588)                                                                                         |
-| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18  | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539)                                                                                           |
-| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12  | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512)                                                                                                        |
-| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10  | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502)                                                                                                    |
-| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09  | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518)                                                                                        |
diff --git a/airflow/providers/amazon/README.md b/airflow/providers/amazon/README.md
deleted file mode 100644
index f361c16..0000000
--- a/airflow/providers/amazon/README.md
+++ /dev/null
@@ -1,483 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-amazon
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider classes summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-        - [Moved operators](#moved-operators)
-    - [Transfer operators](#transfer-operators)
-        - [New transfer operators](#new-transfer-operators)
-        - [Moved transfer operators](#moved-transfer-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-        - [Moved hooks](#moved-hooks)
-    - [Secrets](#secrets)
-        - [Moved secrets](#moved-secrets)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `amazon` provider. All classes for this provider package
-are in the `airflow.providers.amazon` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to installation errors, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-amazon`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| boto3         | >=1.15.0,<1.16.0   |
-| botocore      | >=1.18.0,<1.19.0   |
-| watchtower    | ~=0.7.3            |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-amazon[apache.hive]
-```
-
-| Dependent package                                                                                     | Extra       |
-|:------------------------------------------------------------------------------------------------------|:------------|
-| [apache-airflow-providers-apache-hive](https://pypi.org/project/apache-airflow-providers-apache-hive) | apache.hive |
-| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google)           | google      |
-| [apache-airflow-providers-imap](https://pypi.org/project/apache-airflow-providers-imap)               | imap        |
-| [apache-airflow-providers-mongo](https://pypi.org/project/apache-airflow-providers-mongo)             | mongo       |
-| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql)             | mysql       |
-| [apache-airflow-providers-postgres](https://pypi.org/project/apache-airflow-providers-postgres)       | postgres    |
-| [apache-airflow-providers-ssh](https://pypi.org/project/apache-airflow-providers-ssh)                 | ssh         |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors and secrets for the `amazon` provider
-are in the `airflow.providers.amazon` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
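-
-As a minimal sketch based on the mapping tables below, migrating an import from Airflow 1.10 to the
-Airflow 2.0 provider package only requires updating the module path:
-
-```python
-# Airflow 1.10.* paths (see the "Moved operators" and "Moved hooks" tables below):
-# from airflow.hooks.S3_hook import S3Hook
-# from airflow.contrib.operators.ecs_operator import ECSOperator
-
-# Airflow 2.0 provider paths:
-from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.aws.operators.ecs import ECSOperator
-```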
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.amazon` package                                                                                                                                                         |
-|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.operators.cloud_formation.CloudFormationCreateStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py)                                            |
-| [aws.operators.cloud_formation.CloudFormationDeleteStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py)                                            |
-| [aws.operators.datasync.AWSDataSyncOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/datasync.py)                                                                        |
-| [aws.operators.ec2_start_instance.EC2StartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_start_instance.py)                                               |
-| [aws.operators.ec2_stop_instance.EC2StopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_stop_instance.py)                                                  |
-| [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py)                                               |
-| [aws.operators.glacier.GlacierCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glacier.py)                                                                     |
-| [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py)                                                                                 |
-| [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                                                                   |
-| [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                                                                   |
-| [aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py)                                                  |
-| [aws.operators.sagemaker_processing.SageMakerProcessingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_processing.py)                                        |
-| [aws.operators.step_function_get_execution_output.StepFunctionGetExecutionOutputOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py) |
-| [aws.operators.step_function_start_execution.StepFunctionStartExecutionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_start_execution.py)               |
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.amazon` package                                                                                                                                    | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                                |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.operators.athena.AWSAthenaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/athena.py)                                                     | [contrib.operators.aws_athena_operator.AWSAthenaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_athena_operator.py)                                             |
-| [aws.operators.batch.AwsBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/batch.py)                                                        | [contrib.operators.awsbatch_operator.AWSBatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py)                                                  |
-| [aws.operators.ecs.ECSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py)                                                                 | [contrib.operators.ecs_operator.ECSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py)                                                                 |
-| [aws.operators.emr_add_steps.EmrAddStepsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_add_steps.py)                                     | [contrib.operators.emr_add_steps_operator.EmrAddStepsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_add_steps_operator.py)                                     |
-| [aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_create_job_flow.py)                    | [contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_create_job_flow_operator.py)                    |
-| [aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py)           | [contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_terminate_job_flow_operator.py)           |
-| [aws.operators.s3_copy_object.S3CopyObjectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_copy_object.py)                                  | [contrib.operators.s3_copy_object_operator.S3CopyObjectOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_copy_object_operator.py)                                  |
-| [aws.operators.s3_delete_objects.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_delete_objects.py)                         | [contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_delete_objects_operator.py)                         |
-| [aws.operators.s3_list.S3ListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_list.py)                                                      | [contrib.operators.s3_list_operator.S3ListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_list_operator.py)                                                      |
-| [aws.operators.sagemaker_base.SageMakerBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_base.py)                                 | [contrib.operators.sagemaker_base_operator.SageMakerBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_base_operator.py)                                 |
-| [aws.operators.sagemaker_endpoint.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py)                     | [contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_operator.py)                     |
-| [aws.operators.sagemaker_endpoint_config.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py) | [contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_config_operator.py) |
-| [aws.operators.sagemaker_model.SageMakerModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_model.py)                              | [contrib.operators.sagemaker_model_operator.SageMakerModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_model_operator.py)                              |
-| [aws.operators.sagemaker_training.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_training.py)                     | [contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_training_operator.py)                     |
-| [aws.operators.sagemaker_transform.SageMakerTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_transform.py)                  | [contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_transform_operator.py)                  |
-| [aws.operators.sagemaker_tuning.SageMakerTuningOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_tuning.py)                           | [contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_tuning_operator.py)                           |
-| [aws.operators.sns.SnsPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sns.py)                                                          | [contrib.operators.sns_publish_operator.SnsPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sns_publish_operator.py)                                          |
-| [aws.operators.sqs.SQSPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sqs.py)                                                          | [contrib.operators.aws_sqs_publish_operator.SQSPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_sqs_publish_operator.py)                                  |
-
-
-## Transfer operators
-
-
-### New transfer operators
-
-| New Airflow 2.0 transfers: `airflow.providers.amazon` package                                                                                               |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.transfers.glacier_to_gcs.GlacierToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py) |
-| [aws.transfers.mysql_to_s3.MySQLToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mysql_to_s3.py)          |
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.amazon` package                                                                                                                       | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                   |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py)                     | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py)                                       |
-| [aws.transfers.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/gcs_to_s3.py)                                    | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py)                                                                      |
-| [aws.transfers.google_api_to_s3.GoogleApiToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/google_api_to_s3.py)                | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py)                                |
-| [aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py)               | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py)                                 |
-| [aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) |
-| [aws.transfers.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mongo_to_s3.py)                              | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py)                                                |
-| [aws.transfers.redshift_to_s3.RedshiftToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/redshift_to_s3.py)                     | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py)                                     |
-| [aws.transfers.s3_to_redshift.S3ToRedshiftOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_redshift.py)                     | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py)                                     |
-| [aws.transfers.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_sftp.py)                                 | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py)                                 |
-| [aws.transfers.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/sftp_to_s3.py)                                 | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py)                                 |
-
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.amazon` package                                                                                                                      |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.sensors.cloud_formation.CloudFormationCreateStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py)             |
-| [aws.sensors.cloud_formation.CloudFormationDeleteStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py)             |
-| [aws.sensors.ec2_instance_state.EC2InstanceStateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/ec2_instance_state.py)                |
-| [aws.sensors.glacier.GlacierJobOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glacier.py)                                   |
-| [aws.sensors.glue.AwsGlueJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue.py)                                                  |
-| [aws.sensors.redshift.AwsRedshiftClusterSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/redshift.py)                                  |
-| [aws.sensors.s3_keys_unchanged.S3KeysUnchangedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py)                   |
-| [aws.sensors.sagemaker_training.SageMakerTrainingSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_training.py)               |
-| [aws.sensors.step_function_execution.StepFunctionExecutionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/step_function_execution.py) |
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.amazon` package                                                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                        |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.sensors.athena.AthenaSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/athena.py)                                                  | [contrib.sensors.aws_athena_sensor.AthenaSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_athena_sensor.py)                                                  |
-| [aws.sensors.emr_base.EmrBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_base.py)                                             | [contrib.sensors.emr_base_sensor.EmrBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_base_sensor.py)                                                     |
-| [aws.sensors.emr_job_flow.EmrJobFlowSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_job_flow.py)                                  | [contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_job_flow_sensor.py)                                          |
-| [aws.sensors.emr_step.EmrStepSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_step.py)                                             | [contrib.sensors.emr_step_sensor.EmrStepSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_step_sensor.py)                                                     |
-| [aws.sensors.glue_catalog_partition.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py) | [contrib.sensors.aws_glue_catalog_partition_sensor.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py) |
-| [aws.sensors.s3_key.S3KeySensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_key.py)                                                   | [sensors.s3_key_sensor.S3KeySensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_key_sensor.py)                                                                           |
-| [aws.sensors.s3_prefix.S3PrefixSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_prefix.py)                                          | [sensors.s3_prefix_sensor.S3PrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_prefix_sensor.py)                                                                  |
-| [aws.sensors.sagemaker_base.SageMakerBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_base.py)                           | [contrib.sensors.sagemaker_base_sensor.SageMakerBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_base_sensor.py)                                   |
-| [aws.sensors.sagemaker_endpoint.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py)               | [contrib.sensors.sagemaker_endpoint_sensor.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_endpoint_sensor.py)                       |
-| [aws.sensors.sagemaker_transform.SageMakerTransformSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_transform.py)            | [contrib.sensors.sagemaker_transform_sensor.SageMakerTransformSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_transform_sensor.py)                    |
-| [aws.sensors.sagemaker_tuning.SageMakerTuningSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py)                     | [contrib.sensors.sagemaker_tuning_sensor.SageMakerTuningSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_tuning_sensor.py)                             |
-| [aws.sensors.sqs.SQSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sqs.py)                                                           | [contrib.sensors.aws_sqs_sensor.SQSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_sqs_sensor.py)                                                           |
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.amazon` package                                                                                                                                    |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.hooks.batch_client.AwsBatchClientHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_client.py)                                                |
-| [aws.hooks.batch_waiters.AwsBatchWaitersHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_waiters.py)                                             |
-| [aws.hooks.cloud_formation.AWSCloudFormationHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/cloud_formation.py)                                       |
-| [aws.hooks.ec2.EC2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ec2.py)                                                                             |
-| [aws.hooks.elasticache_replication_group.ElastiCacheReplicationGroupHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py) |
-| [aws.hooks.glacier.GlacierHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glacier.py)                                                                 |
-| [aws.hooks.glue.AwsGlueJobHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue.py)                                                                    |
-| [aws.hooks.kinesis.AwsFirehoseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/kinesis.py)                                                             |
-| [aws.hooks.redshift.RedshiftHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/redshift.py)                                                              |
-| [aws.hooks.secrets_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/secrets_manager.py)                                          |
-| [aws.hooks.ses.SESHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ses.py)                                                                             |
-| [aws.hooks.step_function.StepFunctionHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/step_function.py)                                                |
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.amazon` package                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                 |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.hooks.athena.AWSAthenaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/athena.py)                   | [contrib.hooks.aws_athena_hook.AWSAthenaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_athena_hook.py)                  |
-| [aws.hooks.base_aws.AwsBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/base_aws.py)                 | [contrib.hooks.aws_hook.AwsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_hook.py)                                      |
-| [aws.hooks.datasync.AWSDataSyncHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/datasync.py)             | [contrib.hooks.aws_datasync_hook.AWSDataSyncHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_datasync_hook.py)            |
-| [aws.hooks.dynamodb.AwsDynamoDBHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/dynamodb.py)             | [contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_dynamodb_hook.py)            |
-| [aws.hooks.emr.EmrHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/emr.py)                               | [contrib.hooks.emr_hook.EmrHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/emr_hook.py)                                      |
-| [aws.hooks.glue_catalog.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue_catalog.py)  | [contrib.hooks.aws_glue_catalog_hook.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_glue_catalog_hook.py) |
-| [aws.hooks.lambda_function.AwsLambdaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/lambda_function.py) | [contrib.hooks.aws_lambda_hook.AwsLambdaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_lambda_hook.py)                  |
-| [aws.hooks.logs.AwsLogsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/logs.py)                         | [contrib.hooks.aws_logs_hook.AwsLogsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_logs_hook.py)                        |
-| [aws.hooks.s3.S3Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/s3.py)                                  | [hooks.S3_hook.S3Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/S3_hook.py)                                                         |
-| [aws.hooks.sagemaker.SageMakerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sagemaker.py)             | [contrib.hooks.sagemaker_hook.SageMakerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sagemaker_hook.py)                    |
-| [aws.hooks.sns.AwsSnsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sns.py)                            | [contrib.hooks.aws_sns_hook.AwsSnsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sns_hook.py)                           |
-| [aws.hooks.sqs.SQSHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sqs.py)                               | [contrib.hooks.aws_sqs_hook.SQSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sqs_hook.py)                              |
-
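-As a hedged usage sketch (the `list_keys` call and the `aws_default` connection below are assumptions
-for illustration; check the hook's documentation for exact signatures), a moved hook is imported from
-its new location and used as before:
-
-```python
-from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-
-# Assumes an "aws_default" Airflow connection with valid AWS credentials.
-hook = S3Hook(aws_conn_id="aws_default")
-
-# S3Hook list methods return a list of matching keys; here we list keys
-# under a prefix in a hypothetical bucket.
-keys = hook.list_keys(bucket_name="my-example-bucket", prefix="data/")
-print(keys)
-```
-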
-
-## Secrets
-
-
-
-### Moved secrets
-
-| Airflow 2.0 secrets: `airflow.providers.amazon` package                                                                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                  |
-|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.secrets.secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/secrets_manager.py)               | [contrib.secrets.aws_secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_secrets_manager.py)               |
-| [aws.secrets.systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/systems_manager.py) | [contrib.secrets.aws_systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_systems_manager.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08  | Move dummy_operator.py to dummy.py (#11178) (#11293)                                                                                                               |
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don't use time.time() or timezone.utcnow() for duration calculations (#12353)                                                                                      |
-| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28  | Move connection guides to provider documentation packages (#12653)                                                                                                 |
-| [663259d4b](https://github.com/apache/airflow/commit/663259d4b541ab10ce55fec4d2460e23917062c2) | 2020-11-25  | Fix AWS DataSync tests failing (#11020)                                                                                                                            |
-| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24  | Add check for duplicates in provider.yaml files (#12578)                                                                                                           |
-| [ed09915a0](https://github.com/apache/airflow/commit/ed09915a02b9b99e60689e647452addaab1688fc) | 2020-11-23  | [AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389)                                                                                   |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13  | Add extra error handling to S3 remote logging (#9908)                                                                                                              |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08  | Add authentication to AWS with Google credentials (#12079)                                                                                                         |
-| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07  | In AWS Secrets backend, a lookup is optional (#12143)                                                                                                              |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06  | Simplify string expressions (#12123)                                                                                                                               |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02  | Docstring fix for S3DeleteBucketOperator (#12049)                                                                                                                  |
-| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29  | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844)                                                                                        |
-| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28  | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246)                                                                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26  | Fix spellings (#11825)                                                                                                                                             |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24  | Fix spelling (#11821)                                                                                                                                              |
-| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24  | Remove redundant builtins imports (#11809)                                                                                                                         |
-| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24  | Fix spelling and grammar (#11814)                                                                                                                                  |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23  | Add reattach flag to ECSOperator (#10643)                                                                                                                          |
-| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 2020-10-22  | Add type hints to  aws provider (#11531)                                                                                                                           |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19  | Fixes MySQLToS3 float to int conversion (#10437)                                                                                                                   |
-| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16  | Add type annotations for AWS operators and hooks (#11434)                                                                                                          |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12  | added type hints for aws cloud formation (#11470)                                                                                                                  |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                                                                                   |
-| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11  | Change prefix of AwsDynamoDB hook module (#11209)                                                                                                                  |
-| [42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11  | Update MySQLToS3Operator's s3_bucket to template_fields (#10778)                                                                                                   |
-| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09  | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701)                                                                           |
-| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06  | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829)                                                                                                      |
-| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06  | Strict type check for all hooks in amazon (#11250)                                                                                                                 |
-| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03  | Add s3 key to template fields for s3/redshift transfer operators (#10890)                                                                                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30  | Add amazon glacier to GCS transfer operator (#10947)                                                                                                               |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21  | Add D204 pydocstyle check (#11031)                                                                                                                                 |
-| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19  | Get Airflow configs with sensitive data from AWS Systems Manager (#11023)                                                                                          |
-| [2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19  | Add typing to amazon provider EMR (#10910)                                                                                                                         |
-| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19  | Support extra_args in S3Hook and GCSToS3Operator (#11001)                                                                                                          |
-| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14  | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655)                                                                                                              |
-| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14  | Fix Failing static tests on Master (#10927)                                                                                                                        |
-| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14  | Add more type annotations to AWS hooks (#10671)                                                                                                                    |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08  | Always return a list from S3Hook list methods (#10774)                                                                                                             |
-| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01  | Add placement_strategy option (#9444)                                                                                                                              |
-| [e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31  | fix type hints for s3 hook read_key method (#10653)                                                                                                                |
-| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29  | Update Google Cloud branding (#10642)                                                                                                                              |
-| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28  | Removed bad characters from AWS operator (#10590)                                                                                                                  |
-| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26  | Improve Docstring for AWS Athena Hook/Operator (#10580)                                                                                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24  | Implement impersonation in google operators (#10052)                                                                                                               |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22  | Fixes S3ToRedshift COPY query (#10436)                                                                                                                             |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                                                                                               |
-| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21  | Amazon SES Hook (#10391)                                                                                                                                           |
-| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17  | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022)                                                                                           |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                                                                                                               |
-| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11  | Add type annotations to AwsGlueJobHook, RedshiftHook modules (#10286)                                                                                              |
-| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10  | Revert "Add Amazon SES hook (#10004)" (#10276)                                                                                                                     |
-| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10  | Add Amazon SES hook (#10004)                                                                                                                                       |
-| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06  | Add type annotations to S3 hook module (#10164)                                                                                                                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05  | Add correct signatures for operators in amazon provider package (#10167)                                                                                           |
-| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03  | Improve Typing coverage of amazon/aws/athena (#10025)                                                                                                              |
-| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03  | Add S3KeysUnchangedSensor (#9817)                                                                                                                                  |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01  | Fix typo in Athena sensor retries (#10079)                                                                                                                         |
-| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29  | Adding new SageMaker operator for ProcessingJobs (#9594)                                                                                                           |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25  | Stop using start_date in default_args in example_dags (#9982)                                                                                                      |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20  | Refactor AwsBaseHook._get_credentials (#9878)                                                                                                                      |
-| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16  | Fix S3FileTransformOperator to support S3 Select transformation only (#8936)                                                                                       |
-| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15  | Add option to delete by prefix to S3DeleteObjectsOperator (#9350)                                                                                                  |
-| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13  | Keep functions signatures in decorators (#9786)                                                                                                                    |
-| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08  | Get Airflow configs with sensitive data from Secret Backends (#9645)                                                                                               |
-| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08  | Allow AWSAthenaHook to get more than 1000/first page of results (#6075)                                                                                            |
-| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08  | Add AWS StepFunctions integrations to the aws provider (#8749)                                                                                                     |
-| [ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08  | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator                                                                                                   |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                                                                                                  |
-| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02  | Move S3TaskHandler to the AWS provider package (#9602)                                                                                                             |
-| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29  | Remove almost all references to airflow.contrib (#9559)                                                                                                            |
-| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26  | Remove kwargs from Super calls in AWS Secrets Backends (#9523)                                                                                                     |
-| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25  | Use literal syntax instead of function calls to create data structure (#9516)                                                                                      |
-| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22  | Add AWS ECS system test (#8888)                                                                                                                                    |
-| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21  | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460)                                                                                                   |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19  | Move MySqlToS3Operator to transfers (#9400)                                                                                                                        |
-| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19  | Add MySqlToS3Operator (#9054)                                                                                                                                      |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16  | AWSBatchOperator <> ClientHook relation changed to composition (#9306)                                                                                             |
-| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15  | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211)                                                                                              |
-| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14  | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214)                                                                                                   |
-| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10  | Add S3ToRedshift example dag and system test (#8877)                                                                                                               |
-| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
-| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29  | Add Delete/Create S3 bucket operators (#8895)                                                                                                                      |
-| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28  | Add script_args for S3FileTransformOperator (#9019)                                                                                                                |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
-| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23  | Old json boto compat removed from dynamodb_to_s3 operator (#8987)                                                                                                  |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16  | Speed up TestAwsLambdaHook by not actually running a function (#8882)                                                                                              |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15  | Add EMR operators howto docs (#8863)                                                                                                                               |
-| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13  | Add AWS EMR System tests (#8618)                                                                                                                                   |
-| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13  | Correctly pass sleep time from AWSAthenaOperator down to the hook. (#8845)                                                                                         |
-| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12  | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007)                                                                                                             |
-| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11  | Add option to propagate tags in ECSOperator (#8811)                                                                                                                |
-| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11  | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731)                                                                                                        |
-| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10  | Allow passing backend_kwargs to AWS SSM client (#8802)                                                                                                             |
-| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10  | Add imap_attachment_to_s3 example dag and system test (#8669)                                                                                                      |
-| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07  | Add google_api_to_s3_transfer example dags and system tests (#8581)                                                                                                |
-| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01  | Improve template capabilities of EMR job and step operators (#8572)                                                                                                |
-| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28  | fix: aws hook should work without conn id (#8534)                                                                                                                  |
-| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27  | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571)                                                                                                             |
-| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26  | Add example DAG for ECSOperator (#8452)                                                                                                                            |
-| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24  | [AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441)                                                                                                 |
-| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19  | Remove unrelated EC2 references in ECSOperator (#8451)                                                                                                             |
-| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13  | Make launch_type parameter optional (#8248)                                                                                                                        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08  | Add support for AWS Secrets Manager as Secrets Backend (#8186)                                                                                                     |
-| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04  | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541)                                                                                                         |
-| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31  | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011)                                                                                        |
-| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28  | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945)                                                                                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                                                                                                                 |
-| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28  | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891)                                                                                                 |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                                                                                                      |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                                                                                                     |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827)                                                                            |
-| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19  | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753)                                                                                      |
-| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17  | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745)                                                                                                         |
-| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16  | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733)                                                                                         |
-| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14  | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376)                                                                                                 |
-| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13  | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619)                                                                                                             |
-| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12  | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598)                                                                                                   |
-| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10  | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680)                                                                                                             |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                                                                                                   |
-| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01  | [AIRFLOW-6962] Fix compeleted to completed (#7600)                                                                                                                 |
-| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29  | [AIRFLOW-5908] Add download_file to S3 Hook (#6577)                                                                                                                |
-| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26  | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443)                                                                                            |
-| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25  | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574)                                                                                           |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24  | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451)                                                                                      |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21  | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474)                                                                                                 |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12  | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375)                                                                                                            |
-| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10  | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394)                                                                                                   |
-| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09  | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386)                                                                                                        |
-| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04  | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347)                                                                                                |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03  | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339)                                                                                                                |
-| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03  | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341)                                                                                                |
-| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03  | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342)                                                                                                                     |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02  | [AIRFLOW-4364] Make all code in airflow/providers/amazon pylint compatible (#7336)                                                                                 |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                                                                                                     |
-| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02  | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824)                                                                                               |
-| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02  | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288)                                                                                              |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                                                                                       |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28  | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274)                                                                                            |
-| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28  | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213)                                                                                                      |
-| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28  | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270)                                                                                               |
-| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28  | [AIRFLOW-6655] Move AWS classes to providers (#7271)                                                                                                               |
-| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18  | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146)                                                                                                       |
-| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17  | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178)                                                                                            |
-| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15  | [AIRFLOW-6570] Add dag tag for all example dag (#7176)                                                                                                             |
-| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08  | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811)                                                                                                       |
-| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28  | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871)                                                                                                       |
-| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24  | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888)                                                                                               |
-| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18  | [AIRFLOW-6038] AWS DataSync reworked (#6773)                                                                                                                       |
-| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17  | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764)                                                                                                 |
-| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17  | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. (#6678)                                                                               |
-| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09  | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686)                                                                                |
-| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20  | [AIRFLOW-6021] Replace list literal with list constructor (#6617)                                                                                                  |
-| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19  | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588)                                                                                         |
-| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18  | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539)                                                                                           |
-| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12  | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512)                                                                                                        |
-| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10  | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502)                                                                                                    |
-| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09  | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518)                                                                                        |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/cassandra/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/cassandra/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/cassandra/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index e413e97..0000000
--- a/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                          |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                  |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)             |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                 |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)               |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                       |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                       |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                   |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                           |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                          |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)   |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)               |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                  |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                    |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                       |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)               |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                               |
-| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14  | Add protocol_version to conn_config for Cassandrahook (#11036)                   |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                     |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                       |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                             |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                     |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                      |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                          |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                       |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                      |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                      |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                     |
-| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19  | Add guide for Cassandra Operators (#9877)                                        |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                   |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                       |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                      |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                           |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                     |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)          |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                               |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                         |
-| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01  | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701)                |
-| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30  | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693)            |
-| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22  | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) |
diff --git a/airflow/providers/apache/cassandra/README.md b/airflow/providers/apache/cassandra/README.md
deleted file mode 100644
index 83f3b99..0000000
--- a/airflow/providers/apache/cassandra/README.md
+++ /dev/null
@@ -1,144 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-cassandra
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `apache.cassandra` provider. All classes for this provider package
-are in the `airflow.providers.apache.cassandra` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
-during installation. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-apache-cassandra`.
-
-## PIP requirements
-
-| PIP package      | Version required   |
-|:-----------------|:-------------------|
-| cassandra-driver | >=3.13.0,<3.21.0   |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `apache.cassandra` provider
-are in the `airflow.providers.apache.cassandra` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.apache.cassandra` package                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                            |
-|:-------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.record.CassandraRecordSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/record.py) | [contrib.sensors.cassandra_record_sensor.CassandraRecordSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_record_sensor.py) |
-| [sensors.table.CassandraTableSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/table.py)    | [contrib.sensors.cassandra_table_sensor.CassandraTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_table_sensor.py)    |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.cassandra` package                                                                      | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                              |
-|:-------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.cassandra.CassandraHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/hooks/cassandra.py) | [contrib.hooks.cassandra_hook.CassandraHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cassandra_hook.py) |
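
The two tables above map the old `airflow.contrib` locations to the new provider package paths. As a minimal, illustrative sketch of what that migration looks like in a DAG file (not part of the original README; the connection id, task id, table name, and key values below are placeholders):

```python
# Airflow 1.10.* (old contrib locations, now deprecated):
# from airflow.contrib.hooks.cassandra_hook import CassandraHook
# from airflow.contrib.sensors.cassandra_record_sensor import CassandraRecordSensor

# Airflow 2.0 provider package locations:
from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
from airflow.providers.apache.cassandra.sensors.record import CassandraRecordSensor
from airflow.providers.apache.cassandra.sensors.table import CassandraTableSensor

# Placeholder usage (would normally live inside a DAG definition):
# wait until a record with the given primary-key values exists.
wait_for_record = CassandraRecordSensor(
    task_id="wait_for_cassandra_record",
    cassandra_conn_id="cassandra_default",
    table="keyspace.table_name",
    keys={"p1": "v1", "p2": "v2"},
)
```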
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                          |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                  |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)             |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                 |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)               |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                       |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                       |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                   |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                           |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                          |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)   |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)               |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                  |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                    |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                       |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)               |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                               |
-| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14  | Add protocol_version to conn_config for Cassandrahook (#11036)                   |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                     |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                       |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                             |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                     |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                      |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                          |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                       |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                      |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                      |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                     |
-| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19  | Add guide for Cassandra Operators (#9877)                                        |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                   |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                       |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                      |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                           |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                     |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)          |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                               |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                         |
-| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01  | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701)                |
-| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30  | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693)            |
-| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22  | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/druid/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/druid/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/druid/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 4c0a7b1..0000000
--- a/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,52 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                     |
-|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                  |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                  |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                              |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                      |
-| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14  | Fix Description of Provider Docs (#12361)                                                   |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                     |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10  | Simplify string expressions & Use f-string (#12216)                                         |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)              |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                          |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                             |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                        |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                               |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                           |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                  |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                          |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                          |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                  |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                        |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                          |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                 |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                  |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                        |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                 |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                 |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                              |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                  |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                 |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                      |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                     |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                     |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                            |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                          |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                    |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) |
-| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14  | [AIRFLOW-6510] Fix druid operator templating (#7127)                                        |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)              |
diff --git a/airflow/providers/apache/druid/README.md b/airflow/providers/apache/druid/README.md
deleted file mode 100644
index 11aa2ac..0000000
--- a/airflow/providers/apache/druid/README.md
+++ /dev/null
@@ -1,176 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-druid
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Transfer operators](#transfer-operators)
-        - [Moved transfer operators](#moved-transfer-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `apache.druid` provider. All classes for this provider package
-are in the `airflow.providers.apache.druid` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors during installation, depending on your
-choice of extras. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-apache-druid`.
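-
For example, on a host that already runs Airflow 2.*, the two workarounds above could look like this sketch (pick one; the pip downgrade is only needed if pip 20.3 fails to resolve):

```bash
# Option 1: downgrade pip to the previous resolver, then install the provider
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-apache-druid

# Option 2: keep pip 20.3 and fall back to the legacy resolver
pip install --use-deprecated legacy-resolver apache-airflow-providers-apache-druid
```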
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| pydruid       | >=0.4.1            |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-apache-druid[apache.hive]
-```
-
-| Dependent package                                                                                     | Extra       |
-|:------------------------------------------------------------------------------------------------------|:------------|
-| [apache-airflow-providers-apache-hive](https://pypi.org/project/apache-airflow-providers-apache-hive) | apache.hive |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.druid` provider
-are in the `airflow.providers.apache.druid` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.apache.druid` package                                                                                   | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                       |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py)                  | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py)  |
-| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) |
-
-
-## Transfer operators
-
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.apache.druid` package                                                                                        | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                          |
-|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------|
-| [transfers.hive_to_druid.HiveToDruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/transfers/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.druid` package                                                               | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                       |
-|:--------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
-| [hooks.druid.DruidDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py) | [hooks.druid_hook.DruidDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py) |
-| [hooks.druid.DruidHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py)      | [hooks.druid_hook.DruidHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py)      |
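-
Taken together, the druid tables above translate to import changes along these lines (a sketch based on the module paths listed; note that `DruidCheckOperator` and the hooks previously lived in core `airflow.operators`/`airflow.hooks`, not `airflow.contrib`):

```python
# Airflow 1.10.* locations (deprecated)
from airflow.contrib.operators.druid_operator import DruidOperator
from airflow.operators.druid_check_operator import DruidCheckOperator
from airflow.hooks.druid_hook import DruidDbApiHook, DruidHook

# Airflow 2.0 provider package locations
from airflow.providers.apache.druid.operators.druid import DruidOperator
from airflow.providers.apache.druid.operators.druid_check import DruidCheckOperator
from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook, DruidHook
```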
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                     |
-|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                  |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                  |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                              |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                      |
-| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14  | Fix Description of Provider Docs (#12361)                                                   |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                     |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10  | Simplify string expressions & Use f-string (#12216)                                         |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)              |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                          |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                             |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                        |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                               |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                           |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                  |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                          |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                          |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                  |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                        |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                          |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                 |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                  |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                        |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                 |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                 |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                              |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                  |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                 |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                      |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                     |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                     |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                            |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                          |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                    |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) |
-| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14  | [AIRFLOW-6510] Fix druid operator templating (#7127)                                        |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)              |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/hdfs/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/hdfs/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/hdfs/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index d586356..0000000
--- a/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,53 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                       |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)          |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                    |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25  | [AIRFLOW-6833] HA for webhdfs connection (#7454)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                   |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                 |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
diff --git a/airflow/providers/apache/hdfs/README.md b/airflow/providers/apache/hdfs/README.md
deleted file mode 100644
index 805d289..0000000
--- a/airflow/providers/apache/hdfs/README.md
+++ /dev/null
@@ -1,150 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-hdfs
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `apache.hdfs` provider. All classes for this provider package
-are in the `airflow.providers.apache.hdfs` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors during installation, depending on your
-choice of extras. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-apache-hdfs`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| snakebite-py3 |                    |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.hdfs` provider
-are in the `airflow.providers.apache.hdfs` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.apache.hdfs` package                                                                      | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                               |
-|:----------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.hdfs.HdfsFolderSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py)      | [contrib.sensors.hdfs_sensor.HdfsSensorFolder](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py) |
-| [sensors.hdfs.HdfsRegexSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py)       | [contrib.sensors.hdfs_sensor.HdfsSensorRegex](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py)  |
-| [sensors.hdfs.HdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py)            | [sensors.hdfs_sensor.HdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hdfs_sensor.py)                       |
-| [sensors.web_hdfs.WebHdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/web_hdfs.py) | [sensors.web_hdfs_sensor.WebHdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/web_hdfs_sensor.py)            |
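-
As an illustration of the new sensor location, a minimal DAG using `WebHdfsSensor` from the provider package might look like the sketch below (the `filepath` and `webhdfs_conn_id` argument names, the connection id, and the HDFS path are assumptions for illustration, not taken from this README):

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor

with DAG(
    dag_id="webhdfs_wait_for_file",  # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    wait_for_file = WebHdfsSensor(
        task_id="wait_for_file",
        filepath="/data/incoming/part-0000",  # hypothetical HDFS path
        webhdfs_conn_id="webhdfs_default",    # assumed default connection id
    )
```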
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.hdfs` package                                                                | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                        |
-|:--------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------|
-| [hooks.hdfs.HDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/hdfs.py)          | [hooks.hdfs_hook.HDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hdfs_hook.py)          |
-| [hooks.webhdfs.WebHDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/webhdfs.py) | [hooks.webhdfs_hook.WebHDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/webhdfs_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                       |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)          |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                    |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25  | [AIRFLOW-6833] HA for webhdfs connection (#7454)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                   |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                 |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/hive/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/hive/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/hive/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 0f9b0f1..0000000
--- a/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,77 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [a075b6df9](https://github.com/apache/airflow/commit/a075b6df99a4f5e21d198f7be56b577432e6f9db) | 2020-12-09  | Rename remaining Sensors to match AIP-21 (#12927)                                                                                                                  |
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10  | Enable Markdownlint rule - MD022/blanks-around-headings (#12225)                                                                                                   |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08  | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499)                                                                              |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                                                                                             |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22  | Remove mentions of Airflow Gitter (#10460)                                                                                                                         |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                                                                                               |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                                                                                                                 |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                                                                                                             |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20  | Add drop_partition functionality for HiveMetastoreHook (#9472)                                                                                                     |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                                                                                        |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16  | Make hive macros py3 compatible (#8598)                                                                                                                            |
-| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08  | Don't use the term "whitelist" - language matters (#9174)                                                                                                          |
-| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03  | Remove Hive/Hadoop/Java dependency from unit tests (#9029)                                                                                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21  | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380)                                                                                          |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30  | Remove sql like function in base_hook (#7901)                                                                                                                      |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                                                                                                 |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)                                                                        |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)                                                                                     |
diff --git a/airflow/providers/apache/hive/README.md b/airflow/providers/apache/hive/README.md
deleted file mode 100644
index d232982..0000000
--- a/airflow/providers/apache/hive/README.md
+++ /dev/null
@@ -1,228 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-hive
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Transfer operators](#transfer-operators)
-        - [Moved transfer operators](#moved-transfer-operators)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `apache.hive` provider. All classes for this provider package
-are in the `airflow.providers.apache.hive` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
-during installation. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-hive`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| hmsclient     | >=0.1.0            |
-| pyhive[hive]  | >=0.6.0            |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-apache-hive[amazon]
-```
-
-| Dependent package                                                                                             | Extra           |
-|:--------------------------------------------------------------------------------------------------------------|:----------------|
-| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon)                   | amazon          |
-| [apache-airflow-providers-microsoft-mssql](https://pypi.org/project/apache-airflow-providers-microsoft-mssql) | microsoft.mssql |
-| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql)                     | mysql           |
-| [apache-airflow-providers-presto](https://pypi.org/project/apache-airflow-providers-presto)                   | presto          |
-| [apache-airflow-providers-samba](https://pypi.org/project/apache-airflow-providers-samba)                     | samba           |
-| [apache-airflow-providers-vertica](https://pypi.org/project/apache-airflow-providers-vertica)                 | vertica         |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `apache.hive` provider
-are in the `airflow.providers.apache.hive` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.apache.hive` package                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                              |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py)                            | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py)                            |
-| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) |
-
-
-## Transfer operators
-
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.apache.hive` package                                                                                              | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [transfers.hive_to_mysql.HiveToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_mysql.py)       | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py)                       |
-| [transfers.hive_to_samba.HiveToSambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_samba.py)       | [operators.hive_to_samba_operator.HiveToSambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py)     |
-| [transfers.mssql_to_hive.MsSqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mssql_to_hive.py)       | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py)                       |
-| [transfers.mysql_to_hive.MySqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mysql_to_hive.py)       | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py)                       |
-| [transfers.s3_to_hive.S3ToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/s3_to_hive.py)                | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py)              |
-| [transfers.vertica_to_hive.VerticaToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/vertica_to_hive.py) | [contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) |
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.apache.hive` package                                                                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                       |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.hive_partition.HivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/hive_partition.py)                  | [sensors.hive_partition_sensor.HivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hive_partition_sensor.py)                  |
-| [sensors.metastore_partition.MetastorePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/metastore_partition.py)   | [sensors.metastore_partition_sensor.MetastorePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/metastore_partition_sensor.py)   |
-| [sensors.named_hive_partition.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/named_hive_partition.py) | [sensors.named_hive_partition_sensor.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/named_hive_partition_sensor.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.hive` package                                                                | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                          |
-|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------|
-| [hooks.hive.HiveCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py)       | [hooks.hive_hooks.HiveCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py)       |
-| [hooks.hive.HiveMetastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveMetastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) |
-| [hooks.hive.HiveServer2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py)   | [hooks.hive_hooks.HiveServer2Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py)   |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [a075b6df9](https://github.com/apache/airflow/commit/a075b6df99a4f5e21d198f7be56b577432e6f9db) | 2020-12-09  | Rename remaining Sensors to match AIP-21 (#12927)                                                                                                                  |
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10  | Enable Markdownlint rule - MD022/blanks-around-headings (#12225)                                                                                                   |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08  | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499)                                                                              |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                                                                                             |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22  | Remove mentions of Airflow Gitter (#10460)                                                                                                                         |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                                                                                               |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                                                                                                                 |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                                                                                                             |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20  | Add drop_partition functionality for HiveMetastoreHook (#9472)                                                                                                     |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                                                                                        |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16  | Make hive macros py3 compatible (#8598)                                                                                                                            |
-| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08  | Don't use the term "whitelist" - language matters (#9174)                                                                                                          |
-| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03  | Remove Hive/Hadoop/Java dependency from unit tests (#9029)                                                                                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21  | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380)                                                                                          |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30  | Remove sql like function in base_hook (#7901)                                                                                                                      |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                                                                                                 |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)                                                                        |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)                                                                                     |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/kylin/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/kylin/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/kylin/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 9dc4fab..0000000
--- a/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don't use time.time() or timezone.utcnow() for duration calculations (#12353)  |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25  | Fix incorrect Usage of Optional[str] & Optional[int] (#11141)                  |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                 |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)          |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                           |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                         |
-| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08  | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252)           |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)              |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14  | Add kylin operator (#9149)                                                     |
diff --git a/airflow/providers/apache/kylin/README.md b/airflow/providers/apache/kylin/README.md
deleted file mode 100644
index f4efc35..0000000
--- a/airflow/providers/apache/kylin/README.md
+++ /dev/null
@@ -1,128 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-kylin
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.kylin` provider. All classes for this provider package
-are in `airflow.providers.apache.kylin` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-kylin`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| kylinpy       | >=2.6              |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.kylin` provider
-are in the `airflow.providers.apache.kylin` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.apache.kylin` package                                                                            |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.kylin_cube.KylinCubeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/operators/kylin_cube.py) |
-
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.apache.kylin` package                                                      |
-|:---------------------------------------------------------------------------------------------------------------------|
-| [hooks.kylin.KylinHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/hooks/kylin.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don't use time.time() or timezone.utcnow() for duration calculations (#12353)  |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25  | Fix incorrect Usage of Optional[str] & Optional[int] (#11141)                  |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                 |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)          |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                           |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                         |
-| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08  | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252)           |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)              |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14  | Add kylin operator (#9149)                                                     |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/livy/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/livy/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/livy/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index c3ad1cc..0000000
--- a/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19  | [AIRFLOW-5470] Add Apache Livy REST operator (#6090)                                                                                                               |
diff --git a/airflow/providers/apache/livy/README.md b/airflow/providers/apache/livy/README.md
deleted file mode 100644
index b2fed11..0000000
--- a/airflow/providers/apache/livy/README.md
+++ /dev/null
@@ -1,162 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-livy
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.livy` provider. All classes for this provider package
-are in `airflow.providers.apache.livy` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-livy`
-
-## Cross provider package dependencies
-
-Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified backport providers package in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-apache-livy[http]
-```
-
-| Dependent package                                                                       | Extra   |
-|:----------------------------------------------------------------------------------------|:--------|
-| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http    |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.livy` provider
-are in the `airflow.providers.apache.livy` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.apache.livy` package                                                           |
-|:-----------------------------------------------------------------------------------------------------------------------------|
-| [operators.livy.LivyOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/operators/livy.py) |
-
-
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.apache.livy` package                                                       |
-|:-----------------------------------------------------------------------------------------------------------------------|
-| [sensors.livy.LivySensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/sensors/livy.py) |
-
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.apache.livy` package                                                   |
-|:-----------------------------------------------------------------------------------------------------------------|
-| [hooks.livy.LivyHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/hooks/livy.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19  | [AIRFLOW-5470] Add Apache Livy REST operator (#6090)                                                                                                               |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/pig/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/pig/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/pig/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 3d34579..0000000
--- a/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,51 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                  |
-|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                          |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                     |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                       |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                               |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                               |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                           |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                   |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                  |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)           |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                       |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                          |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                               |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                       |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                       |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                             |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                               |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                     |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                             |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                         |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                              |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                  |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                               |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                              |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                        |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                              |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                             |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                           |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                               |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                              |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                    |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                  |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                             |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                  |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                         |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                       |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)         |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                 |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)           |
diff --git a/airflow/providers/apache/pig/README.md b/airflow/providers/apache/pig/README.md
deleted file mode 100644
index 3e25474..0000000
--- a/airflow/providers/apache/pig/README.md
+++ /dev/null
@@ -1,137 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-pig
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.pig` provider. All classes for this provider package
-are in `airflow.providers.apache.pig` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-pig`
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pig` provider
-are in the `airflow.providers.apache.pig` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.apache.pig` package                                                            | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                |
-|:-------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------|
-| [operators.pig.PigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/operators/pig.py) | [operators.pig_operator.PigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/pig_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.pig` package                                                       | Airflow 1.10.* previous location (usually `airflow.contrib`)                                               |
-|:----------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------|
-| [hooks.pig.PigCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/hooks/pig.py) | [hooks.pig_hook.PigCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/pig_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                  |
-|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                          |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                     |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                       |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                               |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                               |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                           |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                   |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                  |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)           |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                       |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                          |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                               |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                       |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                       |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                             |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                               |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                     |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                             |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                         |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                              |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                   |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                               |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                              |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                        |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                              |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                             |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                           |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                               |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                              |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                    |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                  |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                             |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                  |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                         |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                       |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)         |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                 |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)           |
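
For DAG authors, the "Moved operators" and "Moved hooks" tables in the removed README above translate into a one-line import change. A minimal sketch (assuming the `apache-airflow-providers-apache-pig` package is installed on an Airflow 2.* installation):

    # Airflow 1.10.* import locations, as listed in the tables above:
    #   from airflow.operators.pig_operator import PigOperator
    #   from airflow.hooks.pig_hook import PigCliHook

    # Airflow 2.0 provider package locations:
    from airflow.providers.apache.pig.operators.pig import PigOperator
    from airflow.providers.apache.pig.hooks.pig import PigCliHook
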
diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index 195cce5..3bde4db 100644
--- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13  | Update deprecated Apache Pinot Broker API (#12333)                             |
diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md
index 0107ac3..7b7b997 100644
--- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md
@@ -85,7 +85,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13  | Update deprecated Apache Pinot Broker API (#12333)                             |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/pinot/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/pinot/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/pinot/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index d6470d3..0000000
--- a/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)    |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13  | Update deprecated Apache Pinot Broker API (#12333)                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                 |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                         |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                          |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13  | Less aggressive eager upgrade of requirements (#8267)                          |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
diff --git a/airflow/providers/apache/pinot/README.md b/airflow/providers/apache/pinot/README.md
deleted file mode 100644
index a6f6d96..0000000
--- a/airflow/providers/apache/pinot/README.md
+++ /dev/null
@@ -1,128 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-pinot
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.pinot` provider. All classes for this provider package
-are in `airflow.providers.apache.pinot` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-pinot`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| pinotdb       | ==0.1.1            |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pinot` provider
-are in the `airflow.providers.apache.pinot` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.pinot` package                                                               | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                       |
-|:--------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.pinot.PinotAdminHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotAdminHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) |
-| [hooks.pinot.PinotDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)    |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13  | Update deprecated Apache Pinot Broker API (#12333)                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                 |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                         |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                          |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13  | Less aggressive eager upgrade of requirements (#8267)                          |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/spark/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/spark/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/spark/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 25acd98..0000000
--- a/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,64 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                          |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                  |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                             |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                       |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                       |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                   |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                           |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                          |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                   |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                               |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                  |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                    |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                       |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                               |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                               |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                     |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                 |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                       |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                             |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                      |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                           |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)                            |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                           |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                       |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                             |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                      |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                      |
-| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22  | Update Spark submit operator for Spark 3 support (#8730)                                         |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                     |
-| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11  | Mask other forms of password arguments in SparkSubmitOperator (#9615)                            |
-| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09  | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn kill` (#9044)         |
-| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29  | Add tests for spark_jdbc_script (#9491)                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                   |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                       |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                      |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)     |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                            |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                    |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                      |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                     |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                          |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                     |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                          |
-| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10  | Add default `conf` parameter to Spark JDBC Hook (#8787)                                          |
-| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06  | Add guide for Apache Spark operators (#8305)                                                     |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                 |
-| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02  | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749)                                      |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                 |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                               |
-| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12  | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its parameter properly (#7677)               |
-| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10  | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676)                     |
-| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28  | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                         |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)                   |
diff --git a/airflow/providers/apache/spark/README.md b/airflow/providers/apache/spark/README.md
deleted file mode 100644
index 02308e7..0000000
--- a/airflow/providers/apache/spark/README.md
+++ /dev/null
@@ -1,161 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-spark
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.spark` provider. All classes for this provider package
-are in `airflow.providers.apache.spark` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-spark`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| pyspark       |                    |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.spark` provider
-are in the `airflow.providers.apache.spark` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.apache.spark` package                                                                                      | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                          |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.spark_jdbc.SparkJDBCOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_jdbc.py)       | [contrib.operators.spark_jdbc_operator.SparkJDBCOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_jdbc_operator.py)       |
-| [operators.spark_sql.SparkSqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_sql.py)          | [contrib.operators.spark_sql_operator.SparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_sql_operator.py)          |
-| [operators.spark_submit.SparkSubmitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_submit.py) | [contrib.operators.spark_submit_operator.SparkSubmitOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_submit_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.spark` package                                                                              | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                      |
-|:-----------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.spark_jdbc.SparkJDBCHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_jdbc.py)       | [contrib.hooks.spark_jdbc_hook.SparkJDBCHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_jdbc_hook.py)       |
-| [hooks.spark_sql.SparkSqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_sql.py)          | [contrib.hooks.spark_sql_hook.SparkSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_sql_hook.py)          |
-| [hooks.spark_submit.SparkSubmitHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_submit.py) | [contrib.hooks.spark_submit_hook.SparkSubmitHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_submit_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                          |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                  |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                             |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                       |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                       |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                   |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                           |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                          |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                   |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                               |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                  |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                    |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                       |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                               |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                               |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                     |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                 |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                       |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                             |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                      |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                           |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)                            |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                  |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                       |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                             |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                                      |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                      |
-| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22  | Update Spark submit operator for Spark 3 support (#8730)                                         |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                     |
-| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11  | Mask other forms of password arguments in SparkSubmitOperator (#9615)                            |
-| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09  | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn kill` (#9044)         |
-| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29  | Add tests for spark_jdbc_script (#9491)                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                   |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                       |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                      |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)     |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                   |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                    |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                      |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                     |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                          |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                     |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                          |
-| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10  | Add default `conf` parameter to Spark JDBC Hook (#8787)                                          |
-| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06  | Add guide for Apache Spark operators (#8305)                                                     |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                 |
-| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02  | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749)                                              |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                 |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                                               |
-| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12  | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its parameter properly (#7677)               |
-| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10  | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676)                     |
-| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28  | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                         |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)                   |
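The moved-operators and moved-hooks tables in the Spark README removed above map each Airflow 1.10 `airflow.contrib` path onto its Airflow 2.0 `airflow.providers.apache.spark` location. A minimal sketch of what that path change looks like in DAG code, assuming an Airflow 2.0 installation with `apache-airflow-providers-apache-spark` available; the DAG id, schedule and application path are illustrative, and `spark_default` is assumed as the provider's default connection id:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator

# Airflow 1.10.* location (removed in this diff):
#   from airflow.contrib.operators.spark_submit_operator import SparkSubmitOperator

with DAG(
    dag_id="spark_submit_example",      # illustrative DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,             # run on demand only
) as dag:
    submit_app = SparkSubmitOperator(
        task_id="submit_app",
        application="/path/to/app.py",  # illustrative application path
        conn_id="spark_default",        # assumed default Spark connection id
    )
```

The same pattern applies to `SparkJDBCOperator`, `SparkSqlOperator` and the corresponding hooks, which move from `airflow.contrib` to the `airflow.providers.apache.spark` modules listed in the tables above.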
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/apache/sqoop/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/apache/sqoop/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/apache/sqoop/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index c878b4f..0000000
--- a/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,46 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                 |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
diff --git a/airflow/providers/apache/sqoop/README.md b/airflow/providers/apache/sqoop/README.md
deleted file mode 100644
index aeb2449..0000000
--- a/airflow/providers/apache/sqoop/README.md
+++ /dev/null
@@ -1,132 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-apache-sqoop
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `apache.sqoop` provider. All classes for this provider package
-are in `airflow.providers.apache.sqoop` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-apache-sqoop`
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.sqoop` provider
-are in the `airflow.providers.apache.sqoop` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.apache.sqoop` package                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                      |
-|:---------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.sqoop.SqoopOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/operators/sqoop.py) | [contrib.operators.sqoop_operator.SqoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sqoop_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.apache.sqoop` package                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                  |
-|:---------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.sqoop.SqoopHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/hooks/sqoop.py) | [contrib.hooks.sqoop_hook.SqoopHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sqoop_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05  | Enforce keyword only arguments on apache operators (#10170)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                 |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23  | Add call to Super call in apache providers (#7820)                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12  | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) |
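The Sqoop README removed above records the equivalent single-class moves for this provider. A short, hedged sketch of the new import location, assuming `apache-airflow-providers-apache-sqoop` is installed; the connection id, table name and target directory are placeholders:

```python
from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook

# Airflow 1.10.* location (removed in this diff):
#   airflow.contrib.hooks.sqoop_hook.SqoopHook
# The operator moves the same way, to:
#   airflow.providers.apache.sqoop.operators.sqoop.SqoopOperator

# SqoopHook wraps the local `sqoop` CLI, so this shells out when executed.
hook = SqoopHook(conn_id="sqoop_default")    # placeholder connection id
hook.import_table(
    table="example_table",                   # placeholder source table
    target_dir="/tmp/example_table",         # placeholder target directory
)
```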
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/celery/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/celery/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/celery/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 1b0859b..0000000
--- a/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,41 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11  | improve type hinting for celery provider (#9762)                               |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                 |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)              |
diff --git a/airflow/providers/celery/README.md b/airflow/providers/celery/README.md
deleted file mode 100644
index f258299..0000000
--- a/airflow/providers/celery/README.md
+++ /dev/null
@@ -1,123 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-celery
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `celery` provider. All classes for this provider package
-are in `airflow.providers.celery` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-celery`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| celery        | ~=4.4.2            |
-| flower        | >=0.7.3, <1.0      |
-| vine          | ~=1.3              |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `celery` provider
-are in the `airflow.providers.celery` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.celery` package                                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                |
-|:-----------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.celery_queue.CeleryQueueSensor](https://github.com/apache/airflow/blob/master/airflow/providers/celery/sensors/celery_queue.py) | [contrib.sensors.celery_queue_sensor.CeleryQueueSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/celery_queue_sensor.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11  | improve type hinting for celery provider (#9762)                               |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)              |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/cloudant/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/cloudant/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/cloudant/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 2ab48ac..0000000
--- a/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,42 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                 |
-|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                         |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                    |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                      |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                              |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                              |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                          |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                  |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                 |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)          |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                      |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                         |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                           |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                       |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                              |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                      |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                            |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                              |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                    |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                             |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                              |
-| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15  | Improve type hinting to provider cloudant (#9825)                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                          |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                              |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                             |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                  |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                           |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                             |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                            |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                 |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                            |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                 |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                |
diff --git a/airflow/providers/cloudant/README.md b/airflow/providers/cloudant/README.md
deleted file mode 100644
index b7c6ac4..0000000
--- a/airflow/providers/cloudant/README.md
+++ /dev/null
@@ -1,122 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-cloudant
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `cloudant` provider. All classes for this provider package
-are in the `airflow.providers.cloudant` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, may lead to errors
-during installation. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-cloudant`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| cloudant      | >=2.0              |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `cloudant` provider
-are in the `airflow.providers.cloudant` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.cloudant` package                                                                   | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                           |
-|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.cloudant.CloudantHook](https://github.com/apache/airflow/blob/master/airflow/providers/cloudant/hooks/cloudant.py) | [contrib.hooks.cloudant_hook.CloudantHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cloudant_hook.py) |
-
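To make the relocated import path concrete, here is a minimal, hypothetical sketch of using the hook; it assumes a configured `cloudant_default` connection and an `example_db` database (neither is defined in this README), and that `get_conn()` yields an authenticated session from the `cloudant` library, as the 1.10 contrib hook did:

```python
# Hypothetical sketch: read one document through the relocated CloudantHook.
# Assumes a `cloudant_default` connection and an `example_db` database exist.
from airflow.providers.cloudant.hooks.cloudant import CloudantHook


def read_document(doc_id: str) -> dict:
    hook = CloudantHook(cloudant_conn_id="cloudant_default")
    # get_conn() is used as a context manager yielding a `cloudant` session.
    with hook.get_conn() as session:
        database = session["example_db"]
        return dict(database[doc_id])
```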
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                 |
-|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                         |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                    |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                      |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                              |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                              |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                          |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                  |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                 |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)          |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                      |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                         |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                           |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                       |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                              |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                      |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                            |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                              |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                    |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                            |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                             |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                              |
-| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15  | Improve type hinting to provider cloudant (#9825)                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                          |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                              |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                             |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                  |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                           |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                             |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                            |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                 |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                            |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                 |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                |
diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index 18206b7..5e604b5 100644
--- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18  | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432)        |
 | [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18  | Fix docstrings for Kubernetes Backcompat module (#12422)                       |
 | [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17  | Make K8sPodOperator backwards compatible (#12384)                              |
diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md
index 32b206c..b65fa03 100644
--- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md
@@ -123,7 +123,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18  | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432)        |
 | [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18  | Fix docstrings for Kubernetes Backcompat module (#12422)                       |
 | [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17  | Make K8sPodOperator backwards compatible (#12384)                              |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/cncf/kubernetes/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/cncf/kubernetes/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/cncf/kubernetes/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 217060e..0000000
--- a/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,101 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                      |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                              |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                         |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                             |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                           |
-| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28  | Move connection guides to provider documentation packages (#12653)                           |
-| [c02a3f59e](https://github.com/apache/airflow/commit/c02a3f59e45d3cdd0e4c1c3bda2c62b951bcbea3) | 2020-11-23  | Spark-on-k8s sensor logs - properly pass defined namespace to pod log call (#11199)          |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                   |
-| [9e089ab89](https://github.com/apache/airflow/commit/9e089ab89567b0a52b232f22ed3e708a05137924) | 2020-11-19  | Fix Kube tests (#12479)                                                                      |
-| [d32fe78c0](https://github.com/apache/airflow/commit/d32fe78c0d9d14f016df70a462dc3972f28abe9d) | 2020-11-18  | Update readmes for cncf.kube provider fixes (#12457)                                         |
-| [d84a52dc8](https://github.com/apache/airflow/commit/d84a52dc8fc597d89c5bb4941df67f5f35b70a29) | 2020-11-18  | Fix broken example_kubernetes DAG (#12455)                                                   |
-| [7c8b71d20](https://github.com/apache/airflow/commit/7c8b71d2012d56888f21b24c4844a6838dc3e4b1) | 2020-11-18  | Fix backwards compatibility further (#12451)                                                 |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                   |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                  |
-| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18  | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432)                      |
-| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18  | Fix docstrings for Kubernetes Backcompat module (#12422)                                     |
-| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17  | Make K8sPodOperator backwards compatible (#12384)                                            |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                               |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                       |
-| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14  | Fix full_pod_spec for k8spodoperator (#12354)                                                |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                      |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)               |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                           |
-| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09  | KubernetesPodOperator: use randomized name to get the failure status (#12171)                |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                              |
-| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05  | Randomize pod name (#12117)                                                                  |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04  | Format all files (without excepions) by black (#12091)                                       |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                   |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                           |
-| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21  | Enforce strict rules for yamllint (#11709)                                                   |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                           |
-| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15  | Add better debug logging to K8sexec and K8sPodOp (#11502)                                    |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                 |
-| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09  | fix tests (#11368)                                                                           |
-| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10  | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008)                       |
-| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09  | Users can specify sub-secrets and paths k8spodop (#11369)                                    |
-| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05  | Allow labels in KubernetesPodOperator to be templated (#10796)                               |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                   |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                         |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                 |
-| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27  | Allow overrides for pod_template_file (#11162)                                               |
-| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26  | Increasing type coverage for multiple provider (#11159)                                      |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                               |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                           |
-| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21  | Add D204 pydocstyle check (#11031)                                                           |
-| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17  | Simplify the K8sExecutor and K8sPodOperator (#10393)                                         |
-| [1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16  | KubernetesPodOperator template fix (#10963)                                                  |
-| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14  | Allow to specify path to kubeconfig in KubernetesHook (#10453)                               |
-| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09  | Add connection caching to KubernetesHook (#10447)                                            |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                             |
-| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04  | Make grace_period_seconds option on K8sPodOperator (#10727)                                  |
-| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02  | Add on_kill support for the KubernetesPodOperator (#10666)                                   |
-| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31  | Adds 'cncf.kubernetes' package back to backport provider packages. (#10659)                  |
-| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26  | Spark-on-K8S sensor - add driver logs (#10023)                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                  |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                               |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                         |
-| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11  | Fix KubernetesPodOperator reattachment (#10230)                                              |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                  |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                         |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                         |
-| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31  | Fix typo on reattach property of kubernetespodoperator (#10056)                              |
-| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31  | Allow `image` in `KubernetesPodOperator` to be templated (#10068)                            |
-| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31  | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049)           |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                            |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                  |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                    |
-| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22  | Dump Pod as YAML in logs for KubernetesPodOperator (#9895)                                   |
-| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20  | Improve KubernetesPodOperator guide (#9079)                                                  |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                  |
-| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01  | Switches to Helm Chart for Kubernetes tests (#9468)                                          |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                               |
-| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23  | Fix typo in test_project_structure (#8978)                                                   |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                 |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                      |
-| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16  | Monitor pods by labels instead of names (#6377)                                              |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                 |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                      |
-| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15  | Fix KubernetesPodOperator pod name length validation (#8829)                                 |
-| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02  | Add spark_kubernetes system test (#7875)                                                     |
-| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23  | Add missing call to Super class in 'cncf' & 'docker' providers (#7825)                       |
-| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10  | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163)                                 |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                             |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)             |
-| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24  | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395)       |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)     |
-| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11  | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230)             |
-| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03  | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292)                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                     |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                               |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                 |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                           |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                            |
diff --git a/airflow/providers/cncf/kubernetes/README.md b/airflow/providers/cncf/kubernetes/README.md
deleted file mode 100644
index ba940e9..0000000
--- a/airflow/providers/cncf/kubernetes/README.md
+++ /dev/null
@@ -1,221 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-cncf-kubernetes
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-        - [Moved operators](#moved-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `cncf.kubernetes` provider. All classes for this provider package
-are in the `airflow.providers.cncf.kubernetes` Python package.
-
-
-## Additional limitations
-
-This provider is only usable with Apache Airflow >= 1.10.12 due to refactorings implemented in
-Apache Airflow 1.10.11 and fixes implemented in 1.10.11. The package has appropriate requirements
-set, so you should not be able to install it with Apache Airflow < 1.10.12.
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, may lead to errors
-during installation. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-cncf-kubernetes`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| cryptography  | >=2.0.0            |
-| kubernetes    | >=3.0.0, <12.0.0   |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `cncf.kubernetes` provider
-are in the `airflow.providers.cncf.kubernetes` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package                                                                                              |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.spark_kubernetes.SparkKubernetesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py) |
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package                                                                                            | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.kubernetes_pod.KubernetesPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py) | [contrib.operators.kubernetes_pod_operator.KubernetesPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/kubernetes_pod_operator.py) |
-
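For orientation, the sketch below is a minimal, hypothetical DAG using the Airflow 2.0 import path from the table above; the image, namespace, pod name, and schedule are placeholder values, not something this package prescribes:

```python
# Hypothetical sketch of the Airflow 2.0 import path for KubernetesPodOperator.
from datetime import datetime

from airflow import DAG
from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator

with DAG(
    dag_id="example_kubernetes_pod",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    run_task = KubernetesPodOperator(
        task_id="run_in_pod",
        name="run-in-pod",          # placeholder pod name
        namespace="default",        # placeholder namespace
        image="python:3.8-slim",    # placeholder image
        cmds=["python", "-c", "print('hello from a pod')"],
    )
```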
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.cncf.kubernetes` package                                                                                          |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.spark_kubernetes.SparkKubernetesSensor](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py) |
-
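As a hedged illustration of how the new operator and sensor are typically paired, here is a hypothetical sketch; the manifest file name, application name, and namespace are placeholders and assume a SparkApplication manifest exists in the DAG folder:

```python
# Hypothetical sketch: submit a SparkApplication and wait for it to complete.
from datetime import datetime

from airflow import DAG
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from airflow.providers.cncf.kubernetes.sensors.spark_kubernetes import SparkKubernetesSensor

with DAG(
    dag_id="example_spark_kubernetes",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    submit = SparkKubernetesOperator(
        task_id="submit_spark_app",
        namespace="default",
        application_file="example_spark_application.yaml",  # placeholder manifest
    )
    wait = SparkKubernetesSensor(
        task_id="wait_for_spark_app",
        namespace="default",
        application_name="spark-pi",  # placeholder; must match the manifest's metadata.name
    )
    submit >> wait
```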
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.cncf.kubernetes` package                                                                     |
-|:---------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.kubernetes.KubernetesHook](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/hooks/kubernetes.py) |
-
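And a minimal, hypothetical sketch of the new `KubernetesHook`, assuming a `kubernetes_default` connection pointing at a reachable cluster and that `get_conn()` returns a configured `kubernetes.client.ApiClient` (both are assumptions, not statements from this README):

```python
# Hypothetical sketch: list pods in a namespace using the new KubernetesHook.
from kubernetes import client

from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook

hook = KubernetesHook(conn_id="kubernetes_default")
api_client = hook.get_conn()  # assumed to return kubernetes.client.ApiClient
core_v1 = client.CoreV1Api(api_client)
for pod in core_v1.list_namespaced_pod(namespace="default").items:
    print(pod.metadata.name)
```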
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                      |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                              |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                         |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                             |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                           |
-| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28  | Move connection guides to provider documentation packages (#12653)                           |
-| [c02a3f59e](https://github.com/apache/airflow/commit/c02a3f59e45d3cdd0e4c1c3bda2c62b951bcbea3) | 2020-11-23  | Spark-on-k8s sensor logs - properly pass defined namespace to pod log call (#11199)          |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                   |
-| [9e089ab89](https://github.com/apache/airflow/commit/9e089ab89567b0a52b232f22ed3e708a05137924) | 2020-11-19  | Fix Kube tests (#12479)                                                                      |
-| [d32fe78c0](https://github.com/apache/airflow/commit/d32fe78c0d9d14f016df70a462dc3972f28abe9d) | 2020-11-18  | Update readmes for cncf.kube provider fixes (#12457)                                         |
-| [d84a52dc8](https://github.com/apache/airflow/commit/d84a52dc8fc597d89c5bb4941df67f5f35b70a29) | 2020-11-18  | Fix broken example_kubernetes DAG (#12455)                                                   |
-| [7c8b71d20](https://github.com/apache/airflow/commit/7c8b71d2012d56888f21b24c4844a6838dc3e4b1) | 2020-11-18  | Fix backwards compatibility further (#12451)                                                 |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                   |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                  |
-| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18  | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432)                      |
-| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18  | Fix docstrings for Kubernetes Backcompat module (#12422)                                     |
-| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17  | Make K8sPodOperator backwards compatible (#12384)                                            |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                               |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                       |
-| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14  | Fix full_pod_spec for k8spodoperator (#12354)                                                |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                      |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)               |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                           |
-| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09  | KubernetesPodOperator: use randomized name to get the failure status (#12171)                |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                              |
-| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05  | Randomize pod name (#12117)                                                                  |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04  | Format all files (without excepions) by black (#12091)                                       |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                            |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                   |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                           |
-| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21  | Enforce strict rules for yamllint (#11709)                                                   |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                           |
-| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15  | Add better debug logging to K8sexec and K8sPodOp (#11502)                                    |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                 |
-| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09  | fix tests (#11368)                                                                           |
-| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10  | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008)                       |
-| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09  | Users can specify sub-secrets and paths k8spodop (#11369)                                    |
-| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05  | Allow labels in KubernetesPodOperator to be templated (#10796)                               |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                   |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                         |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                 |
-| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27  | Allow overrides for pod_template_file (#11162)                                               |
-| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26  | Increasing type coverage for multiple provider (#11159)                                      |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                               |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                           |
-| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21  | Add D204 pydocstyle check (#11031)                                                           |
-| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17  | Simplify the K8sExecutor and K8sPodOperator (#10393)                                         |
-| [1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16  | KubernetesPodOperator template fix (#10963)                                                  |
-| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14  | Allow to specify path to kubeconfig in KubernetesHook (#10453)                               |
-| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09  | Add connection caching to KubernetesHook (#10447)                                            |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                             |
-| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04  | Make grace_period_seconds option on K8sPodOperator (#10727)                                  |
-| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02  | Add on_kill support for the KubernetesPodOperator (#10666)                                   |
-| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31  | Adds 'cncf.kubernetes' package back to backport provider packages. (#10659)                  |
-| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26  | Spark-on-K8S sensor - add driver logs (#10023)                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                  |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                               |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                         |
-| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11  | Fix KubernetesPodOperator reattachment (#10230)                                              |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                  |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                         |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                         |
-| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31  | Fix typo on reattach property of kubernetespodoperator (#10056)                              |
-| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31  | Allow `image` in `KubernetesPodOperator` to be templated (#10068)                            |
-| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31  | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049)           |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                            |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                  |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                    |
-| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22  | Dump Pod as YAML in logs for KubernetesPodOperator (#9895)                                   |
-| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20  | Improve KubernetesPodOperator guide (#9079)                                                  |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                  |
-| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01  | Switches to Helm Chart for Kubernetes tests (#9468)                                          |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                               |
-| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23  | Fix typo in test_project_structure (#8978)                                                   |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                 |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                      |
-| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16  | Monitor pods by labels instead of names (#6377)                                              |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                 |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                      |
-| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15  | Fix KubernetesPodOperator pod name length validation (#8829)                                 |
-| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02  | Add spark_kubernetes system test (#7875)                                                     |
-| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23  | Add missing call to Super class in 'cncf' & 'docker' providers (#7825)                       |
-| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10  | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163)                                 |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                             |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)             |
-| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24  | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395)       |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)     |
-| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11  | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230)             |
-| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03  | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292)                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                     |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                               |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                 |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                           |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                            |
diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index 20dd200..bf06b91 100644
--- a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_README.md b/airflow/providers/databricks/BACKPORT_PROVIDER_README.md
index e86909a..e84c8bf 100644
--- a/airflow/providers/databricks/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/databricks/BACKPORT_PROVIDER_README.md
@@ -98,7 +98,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/databricks/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/databricks/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/databricks/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 37c6c75..0000000
--- a/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13  | Add install/uninstall api to databricks hook (#12316)                                                                                                              |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09  | Add how-to Guide for Databricks operators (#12175)                                                                                                                 |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27  | Increase type coverage for five different providers (#11170)                                                                                                       |
-| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18  | Fetching databricks host from connection if not supplied in extras. (#10762)                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25  | Updated REST API call so GET requests pass payload in query string instead of request body (#10462)                                                                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22  | Add support for spark python and submit tasks in Databricks operator(#8846)                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27  | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473)                                                                                                 |
-| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24  | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475)                                                                                       |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827)                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
diff --git a/airflow/providers/databricks/README.md b/airflow/providers/databricks/README.md
deleted file mode 100644
index c4c2bc1..0000000
--- a/airflow/providers/databricks/README.md
+++ /dev/null
@@ -1,153 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-databricks
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `databricks` provider. All classes for this provider package
-are in the `airflow.providers.databricks` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
-during installation. In order to install Airflow you need to either downgrade pip to version 20.2.4
-with `pip install --upgrade pip==20.2.4` or, in case you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-databricks`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| requests      | >=2.20.0, <3       |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `databricks` provider
-are in the `airflow.providers.databricks` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.databricks` package                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                              |
-|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.databricks.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py)    | [contrib.operators.databricks_operator.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py)    |
-| [operators.databricks.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py) | [contrib.operators.databricks_operator.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.databricks` package                                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                 |
-|:----------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.databricks.DatabricksHook](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/hooks/databricks.py) | [contrib.hooks.databricks_hook.DatabricksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/databricks_hook.py) |
-
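As with the operator table above, migrating the hook is only a matter of updating the import path. A minimal sketch based on the locations in these tables (the connection id below is the conventional default and is shown for illustration only):

```python
# Airflow 1.10.* locations (deprecated):
# from airflow.contrib.operators.databricks_operator import DatabricksRunNowOperator
# from airflow.contrib.hooks.databricks_hook import DatabricksHook

# Airflow 2.0 provider package locations:
from airflow.providers.databricks.operators.databricks import DatabricksRunNowOperator
from airflow.providers.databricks.hooks.databricks import DatabricksHook

# Illustrative instantiation; "databricks_default" is assumed to be a configured connection.
hook = DatabricksHook(databricks_conn_id="databricks_default")
```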
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13  | Add install/uninstall api to databricks hook (#12316)                                                                                                              |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09  | Add how-to Guide for Databricks operators (#12175)                                                                                                                 |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27  | Increase type coverage for five different providers (#11170)                                                                                                       |
-| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18  | Fetching databricks host from connection if not supplied in extras. (#10762)                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25  | Updated REST API call so GET requests pass payload in query string instead of request body (#10462)                                                                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22  | Add support for spark python and submit tasks in Databricks operator(#8846)                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27  | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473)                                                                                                 |
-| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24  | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475)                                                                                       |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23  | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827)                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/datadog/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/datadog/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/datadog/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 4fb2934..0000000
--- a/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,44 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20  | improve typing for datadog provider (#9775)                                    |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23  | Add missing call to Super class in remaining providers (#7828)                 |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)       |
diff --git a/airflow/providers/datadog/README.md b/airflow/providers/datadog/README.md
deleted file mode 100644
index ff24ee3..0000000
--- a/airflow/providers/datadog/README.md
+++ /dev/null
@@ -1,137 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-datadog
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `datadog` provider. All classes for this provider package
-are in `airflow.providers.datadog` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-datadog`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| datadog       | >=0.14.0           |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `datadog` provider
-are in the `airflow.providers.datadog` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.datadog` package                                                                    | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                  |
-|:----------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.datadog.DatadogSensor](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/sensors/datadog.py) | [contrib.sensors.datadog_sensor.DatadogSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/datadog_sensor.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.datadog` package                                                                | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                        |
-|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.datadog.DatadogHook](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/hooks/datadog.py) | [contrib.hooks.datadog_hook.DatadogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datadog_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20  | improve typing for datadog provider (#9775)                                    |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23  | Add missing call to Super class in remaining providers (#7828)                 |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)       |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/dingding/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/dingding/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/dingding/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index e6d3c58..0000000
--- a/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,55 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [de15aa30d](https://github.com/apache/airflow/commit/de15aa30d476411379e33bc9d5ce4cf0544e858c) | 2020-11-22  | Deprecate Read the Docs (#12541)                                                                                                                                   |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14  | Fix Description of Provider Docs (#12361)                                                                                                                          |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [1dc709931](https://github.com/apache/airflow/commit/1dc7099315fce0ab3765fbfdde43f44500df08b7) | 2020-11-03  | Fixes import of BaseOperator in dinging (#12063)                                                                                                                   |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21  | Fix case of GitHub (#11398)                                                                                                                                        |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02  | Strict type check for multiple providers (#11229)                                                                                                                  |
-| [ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15  | Fix case of GitHub. (#10955)                                                                                                                                       |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                                                                                                           |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)                                                                                         |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
diff --git a/airflow/providers/dingding/README.md b/airflow/providers/dingding/README.md
deleted file mode 100644
index 409585e..0000000
--- a/airflow/providers/dingding/README.md
+++ /dev/null
@@ -1,157 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-dingding
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `dingding` provider. All classes for this provider package
-are in `airflow.providers.dingding` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-dingding`
-
-## Cross provider package dependencies
-
-Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified backport providers package in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-dingding[http]
-```
-
-| Dependent package                                                                       | Extra   |
-|:----------------------------------------------------------------------------------------|:--------|
-| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http    |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `dingding` provider
-are in the `airflow.providers.dingding` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.dingding` package                                                                           | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                               |
-|:--------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.dingding.DingdingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/operators/dingding.py) | [contrib.operators.dingding_operator.DingdingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dingding_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.dingding` package                                                                   | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                           |
-|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.dingding.DingdingHook](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/hooks/dingding.py) | [contrib.hooks.dingding_hook.DingdingHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/dingding_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [de15aa30d](https://github.com/apache/airflow/commit/de15aa30d476411379e33bc9d5ce4cf0544e858c) | 2020-11-22  | Deprecate Read the Docs (#12541)                                                                                                                                   |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14  | Fix Description of Provider Docs (#12361)                                                                                                                          |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [1dc709931](https://github.com/apache/airflow/commit/1dc7099315fce0ab3765fbfdde43f44500df08b7) | 2020-11-03  | Fixes import of BaseOperator in dinging (#12063)                                                                                                                   |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21  | Fix case of GitHub (#11398)                                                                                                                                        |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02  | Strict type check for multiple providers (#11229)                                                                                                                  |
-| [ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15  | Fix case of GitHub. (#10955)                                                                                                                                       |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                                                                                                           |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)                                                                                         |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/discord/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/discord/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/discord/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index e85fc42..0000000
--- a/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,45 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [e9b2ff57b](https://github.com/apache/airflow/commit/e9b2ff57b81b12cfbf559d957a370d497015acc2) | 2020-12-05  | Add notes about PIP 20.3 breaking Airflow installation (#12840)                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12  | Add type hinting for discord provider (#9773)                                  |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                 |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                       |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)       |
diff --git a/airflow/providers/discord/README.md b/airflow/providers/discord/README.md
deleted file mode 100644
index 40230df..0000000
--- a/airflow/providers/discord/README.md
+++ /dev/null
@@ -1,147 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-discord
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `discord` provider. All classes for this provider package
-are in `airflow.providers.discord` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-discord`
-
-## Cross provider package dependencies
-
-Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified backport providers package in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-discord[http]
-```
-
-| Dependent package                                                                       | Extra   |
-|:----------------------------------------------------------------------------------------|:--------|
-| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http    |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `discord` provider
-are in the `airflow.providers.discord` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.discord` package                                                                                               | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                   |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.discord_webhook.DiscordWebhookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/discord/operators/discord_webhook.py) | [contrib.operators.discord_webhook_operator.DiscordWebhookOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/discord_webhook_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.discord` package                                                                                       | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                               |
-|:---------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.discord_webhook.DiscordWebhookHook](https://github.com/apache/airflow/blob/master/airflow/providers/discord/hooks/discord_webhook.py) | [contrib.hooks.discord_webhook_hook.DiscordWebhookHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/discord_webhook_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [e9b2ff57b](https://github.com/apache/airflow/commit/e9b2ff57b81b12cfbf559d957a370d497015acc2) | 2020-12-05  | Add notes about PIP 20.3 breaking Airflow installation (#12840)                |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                    |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                   |
-| [a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12  | Add type hinting for discord provider (#9773)                                  |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                 |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                       |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)       |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/docker/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/docker/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/docker/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 735477d..0000000
--- a/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,66 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [6b339c70c](https://github.com/apache/airflow/commit/6b339c70c45a2bad0e1e2c3f6638f4c59475569e) | 2020-12-03  | Avoid log spam & have more meaningful log when pull image in DockerOperator (#12763)                                                                                   |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [0314a3a21](https://github.com/apache/airflow/commit/0314a3a218f864f78ec260cc66134e7acae34bc5) | 2020-11-01  | Allow airflow.providers to be installed in multiple python folders (#10806)                                                                                        |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27  | DockerOperator extra_hosts argument support added (#10546)                                                                                                         |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                    |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                             |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06  | Type annotation for Docker operator (#9733)                                                                                                                        |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21  | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463)                                                                                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                                     |
-| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08  | Fix xcom in DockerOperator when auto_remove is used (#9173)                                                                                                        |
-| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07  | Add kernel capabilities in DockerOperator(#9142)                                                                                                                   |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01  | [AIRFLOW-4363] Fix JSON encoding error (#8287)                                                                                                                     |
-| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27  | Stop DockerSwarmOperator from pulling Docker images (#8533)                                                                                                        |
-| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26  | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552)                                                                                                    |
-| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30  | [AIRFLOW-6574] Adding private_environment to docker operator. (#7671)                                                                                              |
-| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25  | [AIRFLOW-4363] Fix JSON encoding error (#7628)                                                                                                                     |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23  | Add missing call to Super class in 'cncf' & 'docker' providers (#7825)                                                                                                     |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18  | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419)                                                                                                     |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
diff --git a/airflow/providers/docker/README.md b/airflow/providers/docker/README.md
deleted file mode 100644
index efbd747..0000000
--- a/airflow/providers/docker/README.md
+++ /dev/null
@@ -1,160 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-docker
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `docker` provider. All classes for this provider package
-are in `airflow.providers.docker` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-docker`
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| docker        | ~=3.0              |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `docker` provider
-are in the `airflow.providers.docker` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.docker` package                                                                                      | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                          |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.docker.DockerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker.py)                  | [operators.docker_operator.DockerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/docker_operator.py)                                  |
-| [operators.docker_swarm.DockerSwarmOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker_swarm.py) | [contrib.operators.docker_swarm_operator.DockerSwarmOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/docker_swarm_operator.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.docker` package                                                             | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                     |
-|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------|
-| [hooks.docker.DockerHook](https://github.com/apache/airflow/blob/master/airflow/providers/docker/hooks/docker.py) | [hooks.docker_hook.DockerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/docker_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [6b339c70c](https://github.com/apache/airflow/commit/6b339c70c45a2bad0e1e2c3f6638f4c59475569e) | 2020-12-03  | Avoid log spam & have more meaningful log when pull image in DockerOperator (#12763)                                                                                   |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [0314a3a21](https://github.com/apache/airflow/commit/0314a3a218f864f78ec260cc66134e7acae34bc5) | 2020-11-01  | Allow airflow.providers to be installed in multiple python folders (#10806)                                                                                        |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27  | DockerOperator extra_hosts argument support added (#10546)                                                                                                         |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06  | Type annotation for Docker operator (#9733)                                                                                                                        |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21  | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463)                                                                                               |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08  | Fix xcom in DockerOperator when auto_remove is used (#9173)                                                                                                        |
-| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07  | Add kernel capabilities in DockerOperator(#9142)                                                                                                                   |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01  | [AIRFLOW-4363] Fix JSON encoding error (#8287)                                                                                                                     |
-| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27  | Stop DockerSwarmOperator from pulling Docker images (#8533)                                                                                                        |
-| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26  | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552)                                                                                                    |
-| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30  | [AIRFLOW-6574] Adding private_environment to docker operator. (#7671)                                                                                              |
-| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25  | [AIRFLOW-4363] Fix JSON encoding error (#7628)                                                                                                                     |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23  | Add missing call to Super class in 'cncf' & 'docker' providers (#7825)                                                                                             |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18  | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419)                                                                                                     |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/elasticsearch/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/elasticsearch/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/elasticsearch/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index e63084e..0000000
--- a/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                               |
-|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                       |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                  |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                    |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                            |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                            |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                        |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                               |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)        |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                    |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09  | Provider's readmes generated for elasticsearch and google packages (#12194)           |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                       |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                         |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                     |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                            |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                    |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                    |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                            |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                  |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                          |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                    |
-| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08  | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) |
-| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01  | Add `log_id` field to log lines on ES handler (#10411)                                |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                           |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                               |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                            |
-| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21  | Increase typing coverage for Elasticsearch (#9911)                                    |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                     |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                             |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                        |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                            |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                           |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                         |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                           |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                          |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                               |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                          |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)               |
-| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18  | [AIRFLOW-1202] Create Elasticsearch Hook (#7358)                                      |
diff --git a/airflow/providers/elasticsearch/README.md b/airflow/providers/elasticsearch/README.md
deleted file mode 100644
index 5b78d03..0000000
--- a/airflow/providers/elasticsearch/README.md
+++ /dev/null
@@ -1,130 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-elasticsearch
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `elasticsearch` provider. All classes for this provider package
-are in the `airflow.providers.elasticsearch` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors during installation, depending on your
-choice of extras. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-elasticsearch`.
-
-## PIP requirements
-
-| PIP package         | Version required   |
-|:--------------------|:-------------------|
-| elasticsearch       | >7, <7.6.0         |
-| elasticsearch-dbapi | ==0.1.0            |
-| elasticsearch-dsl   | >=5.0.0            |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `elasticsearch` provider
-are in the `airflow.providers.elasticsearch` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.elasticsearch` package                                                                              |
-|:----------------------------------------------------------------------------------------------------------------------------------------------|
-| [hooks.elasticsearch.ElasticsearchHook](https://github.com/apache/airflow/blob/master/airflow/providers/elasticsearch/hooks/elasticsearch.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                               |
-|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                       |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                  |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                    |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                            |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                            |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                        |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                               |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)        |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                    |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09  | Provider's readmes generated for elasticsearch and google packages (#12194)           |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                       |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                         |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                     |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                            |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                    |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                    |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                            |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                  |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                          |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                    |
-| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08  | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) |
-| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01  | Add `log_id` field to log lines on ES handler (#10411)                                |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                           |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                               |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                            |
-| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21  | Increase typing coverage for Elasticsearch (#9911)                                    |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                     |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                             |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                        |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                            |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                           |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                         |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                           |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                          |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                               |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                          |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)               |
-| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18  | [AIRFLOW-1202] Create Elasticsearch Hook (#7358)                                      |
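
As a point of reference for the `ElasticsearchHook` removed from the README above (the class itself stays in `airflow.providers.elasticsearch.hooks.elasticsearch`), a minimal usage sketch might look like the following. The default connection, the index name, and the DbApiHook-style call are assumptions based on the `elasticsearch-dbapi` dependency, not details taken from this change:

```python
from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchHook

# Illustrative sketch only. The default connection ("elasticsearch_default" in a
# stock Airflow install), the index name, and the DbApiHook-style get_records()
# call are assumptions, not facts stated in this change.
hook = ElasticsearchHook()

# elasticsearch-dbapi exposes indices through an SQL layer, so a DbApiHook-style
# query against an index name is expected to return rows as tuples.
for row in hook.get_records("SELECT message FROM my_log_index LIMIT 10"):
    print(row)
```
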
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/exasol/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/exasol/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/exasol/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index f2e0d34..0000000
--- a/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                      |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10  | Remove redundant parentheses from Python files (#10967)                        |
-| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10  | Replaced basestring with str in the Exasol hook (#11360)                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27  | Increase type coverage for five different providers (#11170)                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                             |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)           |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                 |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02  | [AIRFLOW-6982] add native python exasol support (#7621)                        |
diff --git a/airflow/providers/exasol/README.md b/airflow/providers/exasol/README.md
deleted file mode 100644
index 07e137f..0000000
--- a/airflow/providers/exasol/README.md
+++ /dev/null
@@ -1,140 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-exasol
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `exasol` provider. All classes for this provider package
-are in the `airflow.providers.exasol` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors during installation, depending on your
-choice of extras. To install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-exasol`.
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| pyexasol      | >=0.5.1,<1.0.0     |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `exasol` provider
-are in the `airflow.providers.exasol` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.exasol` package                                                                 |
-|:------------------------------------------------------------------------------------------------------------------------------|
-| [operators.exasol.ExasolOperator](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/operators/exasol.py) |
-
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.exasol` package                                                         |
-|:------------------------------------------------------------------------------------------------------------------|
-| [hooks.exasol.ExasolHook](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/hooks/exasol.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                      |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10  | Remove redundant parentheses from Python files (#10967)                        |
-| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10  | Replaced basestring with str in the Exasol hook (#11360)                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27  | Increase type coverage for five different providers (#11170)                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                             |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)           |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                 |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02  | [AIRFLOW-6982] add native python exasol support (#7621)                        |
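
For orientation, the `ExasolOperator` listed in the README removed above can be used like any other SQL operator. A minimal, hypothetical DAG sketch follows; the DAG id, connection id, and SQL statement are illustrative assumptions rather than details taken from this change:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.exasol.operators.exasol import ExasolOperator

# Illustrative sketch only: the DAG id, the connection id, and the SQL statement
# are assumed for demonstration purposes.
with DAG(
    dag_id="example_exasol_usage",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    create_table = ExasolOperator(
        task_id="create_table",
        exasol_conn_id="exasol_default",  # assumed default Exasol connection id
        sql="CREATE TABLE IF NOT EXISTS demo (id INT, name VARCHAR(100))",
    )
```
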
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/facebook/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/facebook/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/facebook/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 27a9319..0000000
--- a/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,43 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)         |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22  | Fix typo in Facebook Ads Provider (#10484)                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03  | Add system test and docs for Facebook Ads operators (#8503)                    |
-| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14  | Added Facebook Ads Operator #7887 (#8008)                                      |
diff --git a/airflow/providers/facebook/README.md b/airflow/providers/facebook/README.md
deleted file mode 100644
index c6d2454..0000000
--- a/airflow/providers/facebook/README.md
+++ /dev/null
@@ -1,123 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-facebook
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `facebook` provider. All classes for this provider package
-are in `airflow.providers.facebook` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-facebook`
-
-## PIP requirements
-
-| PIP package       | Version required   |
-|:------------------|:-------------------|
-| facebook-business | >=6.0.2            |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `facebook` provider
-are in the `airflow.providers.facebook` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.facebook` package                                                                         |
-|:------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.hooks.ads.FacebookAdsReportingHook](https://github.com/apache/airflow/blob/master/airflow/providers/facebook/ads/hooks/ads.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                               |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)         |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22  | Fix typo in Facebook Ads Provider (#10484)                                     |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03  | Add system test and docs for Facebook Ads operators (#8503)                    |
-| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14  | Added Facebook Ads Operator #7887 (#8008)                                      |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/ftp/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/ftp/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/ftp/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 60d2e6b87..0000000
--- a/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24  | Increasing type coverage FTP (#11107)                                          |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25  | Remove mlsd function from hooks/ftp.py (#10538)                                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17  | Improve type annotations for Ftp provider (#9868)                              |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23  | Add call to Super class in 'ftp' & 'ssh' providers (#7822)                     |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
diff --git a/airflow/providers/ftp/README.md b/airflow/providers/ftp/README.md
deleted file mode 100644
index 07e590b..0000000
--- a/airflow/providers/ftp/README.md
+++ /dev/null
@@ -1,136 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-ftp
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `ftp` provider. All classes for this provider package
-are in `airflow.providers.ftp` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-ftp`
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `ftp` provider
-are in the `airflow.providers.ftp` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.ftp` package                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                       |
-|:-------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------|
-| [sensors.ftp.FTPSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py) | [contrib.sensors.ftp_sensor.FTPSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py) |
-| [sensors.ftp.FTPSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py)  | [contrib.sensors.ftp_sensor.FTPSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py)  |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.ftp` package                                                     | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                             |
-|:-------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|
-| [hooks.ftp.FTPHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py)  | [contrib.hooks.ftp_hook.FTPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py)  |
-| [hooks.ftp.FTPSHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py) | [contrib.hooks.ftp_hook.FTPSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24  | Increasing type coverage FTP (#11107)                                          |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25  | Remove mlsd function from hooks/ftp.py (#10538)                                |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)           |
-| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17  | Improve type annotations for Ftp provider (#9868)                              |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                               |
-| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23  | Add call to Super class in 'ftp' & 'ssh' providers (#7822)                     |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index 83c5d07..5179b91 100644
--- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                         |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)              |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)              |
 | [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18  | Fix download method in GCSToBigQueryOperator (#12442)                           |
 | [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17  | Adds mechanism for provider package discovery. (#12383)                         |
 | [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17  | Add missing pre-commit definition - provider-yamls (#12393)                     |
diff --git a/airflow/providers/google/BACKPORT_PROVIDER_README.md b/airflow/providers/google/BACKPORT_PROVIDER_README.md
index 0f8f7a2..2eb2ada 100644
--- a/airflow/providers/google/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/google/BACKPORT_PROVIDER_README.md
@@ -593,7 +593,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                         |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)              |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)              |
 | [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18  | Fix download method in GCSToBigQueryOperator (#12442)                           |
 | [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17  | Adds mechanism for provider package discovery. (#12383)                         |
 | [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17  | Add missing pre-commit definition - provider-yamls (#12393)                     |
diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst
new file mode 100644
index 0000000..bed88c9
--- /dev/null
+++ b/airflow/providers/google/CHANGELOG.rst
@@ -0,0 +1,90 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Changelog
+---------
+
+2.0.0
+.....
+
+Updated ``google-cloud-*`` libraries
+````````````````````````````````````
+
+This release of the provider package contains third-party library updates, which may require you to
+update your DAG files or custom hooks and operators if they use objects from those libraries.
+Updating these libraries is necessary to take advantage of the new features made available by their
+newer versions and to obtain bug fixes that are only available in those versions.
+
+Details are covered in each library's UPGRADING guide (linked in the table below), but there are
+a few changes that you should pay particular attention to; a short version-check sketch follows the table.
+
+
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| Library name                                                                                        | Previous constraints | Current constraints | Upgrade Documentation                                                                                                                 |
++=====================================================================================================+======================+=====================+=======================================================================================================================================+
+| `google-cloud-bigquery-datatransfer <https://pypi.org/project/google-cloud-bigquery-datatransfer>`_ | ``>=0.4.0,<2.0.0``   | ``>=3.0.0,<4.0.0``  | `Upgrading google-cloud-bigquery-datatransfer <https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md>`_ |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-datacatalog <https://pypi.org/project/google-cloud-datacatalog>`_                     | ``>=0.5.0,<0.8``     | ``>=1.0.0,<2.0.0``  | `Upgrading google-cloud-datacatalog <https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md>`_                     |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-os-login <https://pypi.org/project/google-cloud-os-login>`_                           | ``>=1.0.0,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-os-login <https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md>`_                            |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-pubsub <https://pypi.org/project/google-cloud-pubsub>`_                               | ``>=1.0.0,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-pubsub <https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md>`_                               |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-kms <https://pypi.org/project/google-cloud-kms>`_                                     | ``>=1.2.1,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-kms <https://github.com/googleapis/python-kms/blob/master/UPGRADING.md>`_                                     |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-tasks <https://pypi.org/project/google-cloud-tasks>`_                                 | ``>=1.2.1,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-task <https://github.com/googleapis/python-tasks/blob/master/UPGRADING.md>`_                                  |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+
+The field names use the snake_case convention
+`````````````````````````````````````````````
+
+If your DAG reads an object from one of the above-mentioned libraries via XCom, you need to update
+the names of the fields it accesses. Previously the fields used the camelCase convention;
+now the snake_case convention is used.
+
+**Before:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
+
+
+**After:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
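+
+The same renaming applies when the XCom value is read in Python code rather than through a Jinja
+template. The snippet below is a minimal sketch only: the ``get-instance`` task id and the
+``persistence_iam_identity`` field follow the example above, while the callable name is a
+hypothetical placeholder.
+
+.. code-block:: python
+
+    def extract_service_account(**context):
+        # The dict returned by the 'get-instance' task now exposes snake_case keys.
+        instance = context["ti"].xcom_pull(task_ids="get-instance")
+        # Previously this was instance["persistenceIamIdentity"].
+        return instance["persistence_iam_identity"].split(":", 2)[1]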
+
+
+
+1.0.0
+.....
+
+Initial version of the provider.
diff --git a/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index ec28ce5..0000000
--- a/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,377 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05  | Add support for extra links coming from the providers (#12472)                                                                                                     |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don't use time.time() or timezone.utcnow() for duration calculations (#12353)                                                                                      |
-| [76bcd08dc](https://github.com/apache/airflow/commit/76bcd08dcae8d62307f5e9b8c2e182b54ed22a27) | 2020-11-28  | Added `@apply_defaults` decorator. (#12620)                                                                                                                        |
-| [e1ebfa68b](https://github.com/apache/airflow/commit/e1ebfa68b109b5993c47891cfd0b9b7e46b6d770) | 2020-11-27  | Add DataflowJobMessagesSensor and DataflowAutoscalingEventsSensor (#12249)                                                                                         |
-| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24  | Add check for duplicates in provider.yaml files (#12578)                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [9e3b2c554](https://github.com/apache/airflow/commit/9e3b2c554dadf58972198e4e16f15af2f15ec37a) | 2020-11-19  | GCP Secrets Optional Lookup (#12360)                                                                                                                               |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18  | Fix download method in GCSToBigQueryOperator (#12442)                                                                                                              |
-| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17  | Adds mechanism for provider package discovery. (#12383)                                                                                                            |
-| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17  | Add missing pre-commit definition - provider-yamls (#12393)                                                                                                        |
-| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17  | Add Dataflow sensors - job metrics (#12039)                                                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16  | Add provide_file_and_upload to GCSHook (#12310)                                                                                                                    |
-| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15  | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726)                                                                                    |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12  | Fixes the sending of an empty list to BigQuery `list_rows` (#12307)                                                                                                |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10  | Enable Markdownlint rule - MD022/blanks-around-headings (#12225)                                                                                                   |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10  | Simplify string expressions & Use f-string (#12216)                                                                                                                |
-| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10  | Add Compute Engine SSH hook (#9879)                                                                                                                                |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for upcoming 1.0.0beta1 releases (#12206)                                                                                                  |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09  | Provider's readmes generated for elasticsearch and google packages (#12194)                                                                                        |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08  | Add authentication to AWS with Google credentials (#12079)                                                                                                         |
-| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08  | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172)                                                        |
-| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06  | Dataflow - add waiting for successful job cancel (#11501)                                                                                                          |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06  | Simplify string expressions (#12123)                                                                                                                               |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04  | Format all files (without exceptions) by black (#12091)                                                                                                            |
-| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04  | Add server side cursor support for postgres to GCS operator (#11793)                                                                                               |
-| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04  | Add DataflowStartSQLQuery operator (#8553)                                                                                                                         |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04  | Add template fields renderers to BigQuery and Dataproc operators (#12067)                                                                                          |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formatter (#9550)                                                                                                                       |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03  | Log BigQuery job id in insert method of BigQueryHook (#12056)                                                                                                      |
-| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03  | Add job name and progress logs to Cloud Storage Transfer Hook (#12014)                                                                                             |
-| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02  | Improve handling server errors in DataprocSubmitJobOperator (#11947)                                                                                               |
-| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30  | Add SalesforceToGcsOperator (#10760)                                                                                                                               |
-| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29  | Add drain option when canceling Dataflow pipelines (#11374)                                                                                                        |
-| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29  | The PRs which are not approved run subset of tests (#11828)                                                                                                        |
-| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28  | Fixing re pattern and changing to use a single character class. (#11857)                                                                                           |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26  | Google Memcached hooks - improve protobuf messages handling (#11743)                                                                                               |
-| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26  | Fix spellings (#11825)                                                                                                                                             |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24  | Fix spelling (#11821)                                                                                                                                              |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22  | Improve Cloud Memorystore for Redis example (#11735)                                                                                                               |
-| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22  | Fix static checks after merging #10121 (#11737)                                                                                                                    |
-| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22  | Add Google Cloud Memorystore Memcached Operators (#10121)                                                                                                          |
-| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21  | Retry requests in case of error in Google ML Engine Hook (#11712)                                                                                                  |
-| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21  | Fix doc errors in google provider files. (#11713)                                                                                                                  |
-| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21  | Enforce strict rules for yamllint (#11709)                                                                                                                         |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19  | Add service_account to Google ML Engine operator (#11619)                                                                                                          |
-| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18  | docs: Update Bigquery clustering docstrings (#11232)                                                                                                               |
-| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18  | Strict type checking for provider Google (#11609)                                                                                                                  |
-| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16  | Add type annotations for AWS operators and hooks (#11434)                                                                                                          |
-| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16  | Add DataflowStartFlexTemplateOperator (#8550)                                                                                                                      |
-| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16  | Strict type checking for provider google cloud  (#11548)                                                                                                           |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12  | Google cloud operator strict type check (#11450)                                                                                                                   |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                                                                                   |
-| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11  | Strict type check for google ads and cloud hooks (#11390)                                                                                                          |
-| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11  | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409)                                                                                         |
-| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10  | Remove redundant parentheses from Python files (#10967)                                                                                                            |
-| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09  | Fix regression in DataflowTemplatedJobStartOperator (#11167)                                                                                                       |
-| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07  | Add AzureFileShareToGCSOperator (#10991)                                                                                                                           |
-| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07  | Improve handling of job_id in BigQuery operators (#11287)                                                                                                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27  | Add example DAG and system test for MySQLToGCSOperator (#10990)                                                                                                    |
-| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25  | Fix incorrect Usage of Optional[str] & Optional[int] (#11141)                                                                                                      |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23  | Add template fields renderers for better UI rendering (#11061)                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22  | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024)                                                                                    |
-| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16  | Add example dag and system test for S3ToGCSOperator (#10951)                                                                                                       |
-| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16  | Fix more docs spellings (#10965)                                                                                                                                   |
-| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13  | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869)                                                                                         |
-| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11  | Add on_kill method to BigQueryInsertJobOperator (#10866)                                                                                                           |
-| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11  | fix typo in firebase/example_filestore DAG (#10875)                                                                                                                |
-| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10  | Add on_kill method to DataprocSubmitJobOperator (#10847)                                                                                                           |
-| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10  | Fix and remove some more typos from spelling_wordlist.txt (#10845)                                                                                                 |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08  | Extract missing gcs_to_local example DAG from gcs example (#10767)                                                                                                 |
-| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08  | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772)                                                                       |
-| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07  | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773)                                                                                          |
-| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07  | Refactor DataprocCreateCluster operator to use simpler interface (#10403)                                                                                          |
-| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05  | Asynchronous execution of Dataproc jobs with a Sensor (#10673)                                                                                                     |
-| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04  | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578)                                                                                            |
-| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01  | Add Dataprep operators (#10304)                                                                                                                                    |
-| [11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30  | Fix typos: duplicated "the" (#10647)                                                                                                                               |
-| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29  | Update Google Cloud branding (#10642)                                                                                                                              |
-| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28  | Fix broken master - DLP (#10635)                                                                                                                                   |
-| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28  | Fix Google DLP example and improve ops idempotency (#10608)                                                                                                        |
-| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28  | Update Google Cloud branding (#10615)                                                                                                                              |
-| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27  | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651)                                                                                                |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                                                                                             |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)                                                                                              |
-| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25  | Fix typo in "Cloud" (#10534)                                                                                                                                       |
-| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24  | Fix typo in PostgresHook (#10529)                                                                                                                                  |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                            |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                     |
-| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24  | Implement impersonation in google operators (#10052)                                                                                                               |
-| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24  | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475)                                                                       |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown references in Providers README (#10483)                                                                                                        |
-| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22  | Fix typo in timed_out (#10459)                                                                                                                                     |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assignment with Augmented assignment (#10468)                                                                                                              |
-| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21  | Dataflow operators don't always create a virtualenv (#10373)                                                                                                       |
-| [083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18  | Simplified GCSTaskHandler configuration (#10365)                                                                                                                   |
-| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17  | Add test for GCSTaskHandler (#9600) (#9861)                                                                                                                        |
-| [e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16  | Add type annotations for mlengine_operator_utils (#10297)                                                                                                          |
-| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16  | Add Bigtable Update Instance Hook/Operator (#10340)                                                                                                                |
-| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15  | CI: Fix failing docs-build (#10342)                                                                                                                                |
-| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15  | Improve idempotency of BigQueryInsertJobOperator (#9590)                                                                                                           |
-| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14  | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326)                                                                            |
-| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13  | Implement Google BigQuery Table Partition Sensor (#10218)                                                                                                          |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                                                                                                               |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                                                                                                                 |
-| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09  | Added DataprepGetJobsForJobGroupOperator (#10246)                                                                                                                  |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                                                                                                             |
-| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08  | Add labels param to Google MLEngine Operators (#10222)                                                                                                             |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06  | Update guide for Google Cloud Secret Manager Backend (#10172)                                                                                                      |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06  | Improve handling Dataproc cluster creation with ERROR state (#9593)                                                                                                |
-| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04  | Add correct signatures for operators in google provider package (#10144)                                                                                           |
-| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03  | Add additional Cloud Datastore operators (#10032)                                                                                                                  |
-| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03  | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031)                                                                                                  |
-| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02  | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117)                                                                                      |
-| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02  | Add missing params to GCP Pub/Sub creation_subscription (#10106)                                                                                                   |
-| [b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02  | Fix sensor not providing arguments for GCSHook (#10074)                                                                                                            |
-| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02  | Fix hook not passing gcp_conn_id to base class (#10075)                                                                                                            |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31  | Split Display Video 360 example into smaller DAGs (#10077)                                                                                                         |
-| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29  | Fix docstrings in BigQueryGetDataOperator (#10042)                                                                                                                 |
-| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27  | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25  | Stop using start_date in default_args in example_dags (#9982)                                                                                                      |
-| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23  | Add more information about using GoogleAdsHook (#9951)                                                                                                             |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22  | Add Google Authentication for experimental API (#9848)                                                                                                             |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22  | Fix calling `get_client` in BigQueryHook.table_exists (#9916)                                                                                                      |
-| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22  | Add support for impersonation in GCP hooks (#9915)                                                                                                                 |
-| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21  | Fix insert_job method of BigQueryHook (#9899)                                                                                                                      |
-| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21  | Remove type hint causing DeprecationWarning in Firestore operators (#9819)                                                                                         |
-| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16  | Fix typo in datafusion operator (#9859)                                                                                                                            |
-| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15  | Change DAG.clear to take dag_run_state (#9824)                                                                                                                     |
-| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15  | Add guide for AI Platform (previously Machine Learning Engine) Operators  (#9798)                                                                                  |
-| [770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15  | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832)                                                                                            |
-| [2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15  | Add CloudVisionDeleteReferenceImageOperator  (#9698)                                                                                                               |
-| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15  | Add Google Deployment Manager Hook (#9159)                                                                                                                         |
-| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14  | Allow `replace` flag in gcs_to_gcs operator. (#9667)                                                                                                               |
-| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13  | Keep functions signatures in decorators (#9786)                                                                                                                    |
-| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13  | Add multiple file upload functionality to GCS hook (#8849)                                                                                                         |
-| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12  | Add Google Stackdriver link (#9765)                                                                                                                                |
-| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11  | Fix StackdriverTaskHandler + add system tests (#9761)                                                                                                              |
-| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09  | Update example DAG for AI Platform operators (#9727)                                                                                                               |
-| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08  | Move gcs & wasb task handlers to their respective provider packages (#9714)                                                                                        |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                                                                                        |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                                                                                                  |
-| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02  | Fix using .json template extension in GMP operators (#9566)                                                                                                        |
-| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30  | Extend BigQuery example with include clause (#9572)                                                                                                                |
-| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30  | Add template_ext to BigQueryInsertJobOperator (#9568)                                                                                                              |
-| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29  | Remove almost all references to airflow.contrib (#9559)                                                                                                            |
-| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27  | Bump Pylint to 2.5.3 (#9294)                                                                                                                                       |
-| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26  | nitpick fix (#9527)                                                                                                                                                |
-| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25  | Use literal syntax instead of function calls to create data structure (#9516)                                                                                      |
-| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22  | Pylint fixes and deprecation of rare used methods in Connection (#9419)                                                                                            |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19  | Don't use connection to store task handler credentials (#9381)                                                                                                     |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19  | Properly propagated warnings in operators (#9348)                                                                                                                  |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18  | Adds GCP Secret Manager Hook (#9368)                                                                                                                               |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16  | Add support for latest Apache Beam SDK in Dataflow operators (#9323)                                                                                               |
-| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15  | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314)                                                                                           |
-| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15  | Resolve upstream tasks when template field is XComArg (#8805)                                                                                                      |
-| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15  | Wait for pipeline state in Data Fusion operators (#8954)                                                                                                           |
-| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10  | Add test for BQ operations using location (#9206)                                                                                                                  |
-| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10  | Make generated job_id more informative in BQ insert_job (#9203)                                                                                                    |
-| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10  | Upgrade pendulum to latest major version ~2.0 (#9184)                                                                                                              |
-| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09  | Allows using private endpoints in GKEStartPodOperator (#9169)                                                                                                      |
-| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05  | Add 3.8 to the test matrices (#8836)                                                                                                                               |
-| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05  | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)                                                                                    |
-| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05  | [AIRFLOW-6290] Create guide for GKE operators (#8883)                                                                                                              |
-| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04  | Fix sql_to_gcs hook gzip of schema_file (#9140)                                                                                                                    |
-| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
-| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01  | Add BigQueryInsertJobOperator (#8868)                                                                                                                              |
-| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31  | Create guide for Dataproc Operators (#9037)                                                                                                                        |
-| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29  | Add example dag and system test for LocalFilesystemToGCSOperator (#9043)                                                                                           |
-| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29  | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066)                                                                                   |
-| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29  | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055)                                                                                      |
-| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29  | add example dag and system test for GoogleSheetsToGCSOperator (#9056)                                                                                              |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                                                                                                     |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26  | Refactor BigQuery operators (#8858)                                                                                                                                |
-| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
-| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23  | Support YAML input for CloudBuildCreateOperator (#8808)                                                                                                            |
-| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19  | [AIRFLOW-6586] Improvements to gcs sensor (#7197)                                                                                                                  |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19  | Allow setting the pooling time in DLPHook (#8824)                                                                                                                  |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16  | Check for same task instead of Equality to detect Duplicate Tasks (#8828)                                                                                          |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13  | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816)                                                                                                |
-| [8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12  | Refactor BigQuery hook methods to use python library (#8631)                                                                                                       |
-| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12  | Fix template fields in Google operators (#8840)                                                                                                                    |
-| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12  | Fix Flake8 errors (#8841)                                                                                                                                          |
-| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12  | Refactor BigQuery check operators (#8813)                                                                                                                          |
-| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10  | Add separate example DAGs and system tests for google cloud speech (#8778)                                                                                         |
-| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10  | Added Upload Multiple Entity Read Files to specified big query dataset (#8610)                                                                                     |
-| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10  | Correctly restore upstream_task_ids when deserializing Operators (#8775)                                                                                           |
-| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08  | Added SDFtoGCSOperator (#8740)                                                                                                                                     |
-| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07  | Add documentation for SpannerDeployInstanceOperator (#8750)                                                                                                        |
-| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06  | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531)                                                                             |
-| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05  | Support num_retries field in env var for GCP connection (#8700)                                                                                                    |
-| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04  | Add system test for gcs_to_bigquery (#8556)                                                                                                                        |
-| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03  | Add system test and docs for Facebook Ads operators (#8503)                                                                                                        |
-| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30  | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625)                                                                                         |
-| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28  | Split and improve BigQuery example DAG (#8529)                                                                                                                     |
-| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28  | Refactor BigQueryHook dataset operations (#8477)                                                                                                                   |
-| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26  | Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430)                                                                                           |
-| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26  | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442)                                                                                              |
-| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25  | Add hook and operator for Google Cloud Life Sciences (#8481)                                                                                                       |
-| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23  | Pass location using parmamter in Dataflow integration (#8382)                                                                                                      |
-| [912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23  | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174)                                                                                                       |
-| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22  | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377)                                                                                   |
-| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20  | Allow multiple extra_packages in Dataflow (#8394)                                                                                                                  |
-| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18  | Added location parameter to BigQueryCheckOperator (#8273)                                                                                                          |
-| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17  | Clean up temporary files in Dataflow operators (#8313)                                                                                                             |
-| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16  | Add Dataproc SparkR Example (#8240)                                                                                                                                |
-| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15  | Create guide for BigQuery operators (#8276)                                                                                                                        |
-| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14  | Raise exception when GCP credential doesn't support account impersonation (#8213)                                                                                  |
-| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14  | Added Facebook Ads Operator #7887 (#8008)                                                                                                                          |
-| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14  | fixed typo (#8294)                                                                                                                                                 |
-| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13  | Less aggressive eager upgrade of requirements (#8267)                                                                                                              |
-| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13  | Add mypy plugin for decorators. (#8145)                                                                                                                            |
-| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13  | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216)                                                                                                         |
-| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10  | Honor schema type for MySQL to GCS data pre-process (#8090)                                                                                                        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06  | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849)                                                                        |
-| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03  | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049)                                                                                         |
-| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02  | Add Google Ads list accounts operator (#8007)                                                                                                                      |
-| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01  | Unify Google class/package names (#8033)                                                                                                                           |
-| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31  | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011)                                                                                        |
-| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31  | Add more refactor steps for providers.google (#8010)                                                                                                               |
-| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31  | Individual package READMEs (#8012)                                                                                                                                 |
-| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30  | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743)                                                                                          |
-| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30  | Improve system tests for Cloud Build (#8003)                                                                                                                       |
-| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29  | Remove GKEStartPodOperator when backporting (#7908)                                                                                                                |
-| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28  | Get Airflow Variables from GCP Secrets Manager (#7946)                                                                                                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                                                                                                                 |
-| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28  | Add download/upload operators for GCS and Google Sheets (#7866)                                                                                                    |
-| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26  | Change signature of GSheetsHook methods (#7853)                                                                                                                    |
-| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26  | Improve idempotency in MLEngineHook.create_model (#7811)                                                                                                           |
-| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26  | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861)                                                                                                  |
-| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26  | Improve setUp/tearDown in Cloud Firestore system test (#7862)                                                                                                      |
-| [8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26  | Improve example DAGs for Cloud Memorystore (#7855)                                                                                                                 |
-| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26  | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856)                                                                                               |
-| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26  | Improve authorization in GCP system tests (#7863)                                                                                                                  |
-| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26  | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869)                                                                                          |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                                                                                                      |
-| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24  | Run Dataflow for ML Engine summary in venv (#7809)                                                                                                                 |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                                                                                                     |
-| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23  | Improve Google PubSub hook publish method (#7831)                                                                                                                  |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23  | Improve example DAG for ML Engine (#7810)                                                                                                                          |
-| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23  | Add call to Super class in 'google' providers (#7823)                                                                                                              |
-| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23  | Fix typo in GCP credentials_provider's docstring (#7818)                                                                                                           |
-| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23  | Add missing docstring in BigQueryHook.create_empty_table (#7817)                                                                                                   |
-| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23  | Imrove support for laatest API in  MLEngineStartTrainingJobOperator (#7812)                                                                                        |
-| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23  | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795)                                                                                                  |
-| [27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22  | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794)                                                                                              |
-| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22  | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791)                                                                             |
-| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20  | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781)                                                                                                       |
-| [5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20  | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748)                                                                                             |
-| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20  | [AIRFLOW-6978] Add PubSubPullOperator (#7766)                                                                                                                      |
-| [6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20  | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692)                                                                                                |
-| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19  | [AIRFLOW-7069] Fix cloudsql system tests (#7770)                                                                                                                   |
-| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19  | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756)                                                                                          |
-| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19  | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759)                                                                                              |
-| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19  | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762)                                                                                                      |
-| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19  | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760)                                                                                                    |
-| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18  | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718)                                                                                                         |
-| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18  | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725)                                                                                                    |
-| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18  | [AIRFLOW-7081] Remove env variables from GCP guide (#7755)                                                                                                         |
-| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18  | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728)                                                                         |
-| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17  | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750)                                                                                           |
-| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17  | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475)                                                                                   |
-| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17  | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738)                                                                                       |
-| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16  | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354)                                                                                                |
-| [2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14  | [AIRFLOW-7055] Verbose logging option for google provider (#7711)                                                                                                  |
-| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13  | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630)                                                                                         |
-| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12  | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685)                                                                                     |
-| [1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10  | [AIRFLOW-6980] Improve system tests and building providers package (#7615)                                                                                         |
-| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09  | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664)                                                                                                     |
-| [e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09  | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659)                                                                                         |
-| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09  | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666)                                                                                                       |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                                                                                                   |
-| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07  | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624)                                                                                               |
-| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06  | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631)                                                                                          |
-| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05  | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535)                                                                           |
-| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04  | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609)                                                                                                     |
-| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04  | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612)                                                                                                                |
-| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04  | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547)                                                                                     |
-| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04  | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604)                                                                                                 |
-| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03  | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607)                                                                                 |
-| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29  | [AIRFLOW-6924] Fix Google DLP operators return types (#7546)                                                                                                       |
-| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27  | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363)                                                                                                        |
-| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25  | [AIRFLOW-6908] Lazy load AirflowException (#7528)                                                                                                                  |
-| [d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25  | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322)                                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24  | [AIRFLOW-6894] Prevent db query in example_dags (#7516)                                                                                                            |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21  | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline integration (#7486)                                                                                                  |
-| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20  | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420)                                                                                                  |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17  | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429)                                                                                                                |
-| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17  | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400)                                                                                        |
-| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16  | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126)                                                                                                      |
-| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13  | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399)                                                                                                 |
-| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03  | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307)                                                                                                               |
-| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03  | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340)                                                                                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03  | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337)                                                                                                            |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                                                                                                     |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                                                                                       |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)                                                                        |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                                                                                                           |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
-| [f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13  | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151)                                                                                                          |
-| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13  | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125)                                                                                            |
-| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09  | [AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112)                                                                                               |
-| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08  | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046)                                                                                                   |
-| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03  | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020)                                                                                                        |
-| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03  | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007)                                                                            |
-| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31  | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968)                                                                                                    |
-| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09  | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464)                                                                                                        |
-| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09  | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749)                                                                                                     |
-| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09  | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734)                                                                                                                  |
-| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01  | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701)                                                                                                  |
-| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26  | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601)                                                                                                      |
-| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15  | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393)                                                                                                        |
-| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13  | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581)                                                                                                |
-| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13  | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371)                                                                                            |
-| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05  | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497)                                                                                                               |
-| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04  | [AIRFLOW-5743] Move Google PubSub to providers package (#6476)                                                                                                     |
-| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30  | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421)                                                                                                    |
-| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27  | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424)                                                                                               |
-| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22  | [AIRFLOW-4971] Add Google Display & Video 360 integration (#6170)                                                                                                  |
-| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22  | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228)                                                                                                         |
-| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18  | [AIRFLOW-5656] Rename provider to providers module (#6333)                                                                                                         |
diff --git a/airflow/providers/google/README.md b/airflow/providers/google/README.md
deleted file mode 100644
index 4fa48de..0000000
--- a/airflow/providers/google/README.md
+++ /dev/null
@@ -1,967 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-google
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider classes summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-        - [Moved operators](#moved-operators)
-    - [Transfer operators](#transfer-operators)
-        - [New transfer operators](#new-transfer-operators)
-        - [Moved transfer operators](#moved-transfer-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-        - [Moved hooks](#moved-hooks)
-    - [Secrets](#secrets)
-        - [Moved secrets](#moved-secrets)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `google` provider. All classes for this provider package
-are in the `airflow.providers.google` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and may lead to installation errors, depending on your choice
-of extras. To install Airflow, either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your `pip install` command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-google`.
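-
-For example, the two paths described in the note above look roughly like this (a sketch based on that note; add any extras your deployment needs):
-
-```bash
-# Option 1: downgrade pip to 20.2.4, then install the provider package
-pip install --upgrade pip==20.2.4
-pip install apache-airflow-providers-google
-
-# Option 2: stay on pip 20.3 and fall back to the legacy resolver
-pip install --use-deprecated legacy-resolver apache-airflow-providers-google
-```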
-
-## PIP requirements
-
-| PIP package                        | Version required |
-|:-----------------------------------|:-----------------|
-| PyOpenSSL                          |                  |
-| google-ads                         | >=4.0.0,<8.0.0   |
-| google-api-python-client           | >=1.6.0,<2.0.0   |
-| google-auth                        | >=1.0.0,<2.0.0   |
-| google-auth-httplib2               | >=0.0.1          |
-| google-cloud-automl                | >=0.4.0,<2.0.0   |
-| google-cloud-bigquery-datatransfer | >=0.4.0,<2.0.0   |
-| google-cloud-bigtable              | >=1.0.0,<2.0.0   |
-| google-cloud-container             | >=0.1.1,<2.0.0   |
-| google-cloud-datacatalog           | >=0.5.0, <0.8    |
-| google-cloud-dataproc              | >=1.0.1,<2.0.0   |
-| google-cloud-dlp                   | >=0.11.0,<2.0.0  |
-| google-cloud-kms                   | >=1.2.1,<2.0.0   |
-| google-cloud-language              | >=1.1.1,<2.0.0   |
-| google-cloud-logging               | >=1.14.0,<2.0.0  |
-| google-cloud-memcache              | >=0.2.0          |
-| google-cloud-monitoring            | >=0.34.0,<2.0.0  |
-| google-cloud-os-login              | >=1.0.0,<2.0.0   |
-| google-cloud-pubsub                | >=1.0.0,<2.0.0   |
-| google-cloud-redis                 | >=0.3.0,<2.0.0   |
-| google-cloud-secret-manager        | >=0.2.0,<2.0.0   |
-| google-cloud-spanner               | >=1.10.0,<2.0.0  |
-| google-cloud-speech                | >=0.36.3,<2.0.0  |
-| google-cloud-storage               | >=1.16,<2.0.0    |
-| google-cloud-tasks                 | >=1.2.1,<2.0.0   |
-| google-cloud-texttospeech          | >=0.4.0,<2.0.0   |
-| google-cloud-translate             | >=1.5.0,<2.0.0   |
-| google-cloud-videointelligence     | >=1.7.0,<2.0.0   |
-| google-cloud-vision                | >=0.35.2,<2.0.0  |
-| grpcio-gcp                         | >=0.2.2          |
-| pandas-gbq                         |                  |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-google[amazon]
-```
-
-| Dependent package                                                                                               | Extra            |
-|:----------------------------------------------------------------------------------------------------------------|:-----------------|
-| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon)                     | amazon           |
-| [apache-airflow-providers-apache-cassandra](https://pypi.org/project/apache-airflow-providers-apache-cassandra) | apache.cassandra |
-| [apache-airflow-providers-cncf-kubernetes](https://pypi.org/project/apache-airflow-providers-cncf-kubernetes)   | cncf.kubernetes  |
-| [apache-airflow-providers-facebook](https://pypi.org/project/apache-airflow-providers-facebook)                 | facebook         |
-| [apache-airflow-providers-microsoft-azure](https://pypi.org/project/apache-airflow-providers-microsoft-azure)   | microsoft.azure  |
-| [apache-airflow-providers-microsoft-mssql](https://pypi.org/project/apache-airflow-providers-microsoft-mssql)   | microsoft.mssql  |
-| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql)                       | mysql            |
-| [apache-airflow-providers-postgres](https://pypi.org/project/apache-airflow-providers-postgres)                 | postgres         |
-| [apache-airflow-providers-presto](https://pypi.org/project/apache-airflow-providers-presto)                     | presto           |
-| [apache-airflow-providers-salesforce](https://pypi.org/project/apache-airflow-providers-salesforce)             | salesforce       |
-| [apache-airflow-providers-sftp](https://pypi.org/project/apache-airflow-providers-sftp)                         | sftp             |
-| [apache-airflow-providers-ssh](https://pypi.org/project/apache-airflow-providers-ssh)                           | ssh              |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `google` provider
-are in the `airflow.providers.google` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages).
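-
-As a quick illustration of that layout (a sketch; it assumes the provider package is installed and uses one of the operators listed below):
-
-```bash
-# Check that the Airflow 2.0 import path for a Google provider operator resolves
-python -c "from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator"
-```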
-
-
-## Operators
-
-
-### New operators
-
-| New Airflow 2.0 operators: `airflow.providers.google` package                                                                                                                                                                |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.operators.ads.GoogleAdsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py)                                                                               |
-| [cloud.operators.automl.AutoMLBatchPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                        |
-| [cloud.operators.automl.AutoMLCreateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                       |
-| [cloud.operators.automl.AutoMLDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                       |
-| [cloud.operators.automl.AutoMLDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                         |
-| [cloud.operators.automl.AutoMLDeployModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                         |
-| [cloud.operators.automl.AutoMLGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                            |
-| [cloud.operators.automl.AutoMLImportDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                          |
-| [cloud.operators.automl.AutoMLListDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                         |
-| [cloud.operators.automl.AutoMLPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                             |
-| [cloud.operators.automl.AutoMLTablesListColumnSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                               |
-| [cloud.operators.automl.AutoMLTablesListTableSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                |
-| [cloud.operators.automl.AutoMLTablesUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                 |
-| [cloud.operators.automl.AutoMLTrainModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                          |
-| [cloud.operators.bigquery.BigQueryInsertJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                     |
-| [cloud.operators.bigquery_dts.BigQueryCreateDataTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                                    |
-| [cloud.operators.bigquery_dts.BigQueryDataTransferServiceStartTransferRunsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                  |
-| [cloud.operators.bigquery_dts.BigQueryDeleteDataTransferConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                              |
-| [cloud.operators.bigtable.BigtableUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                                |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceAndImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                             |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                      |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                      |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreExportAndDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                             |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                      |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                    |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                         |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                              |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                       |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                            |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                             |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                             |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                              |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                             |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                           |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                       |
-| [cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                      |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)          |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)           |
-| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                |
-| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                     |
-| [cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                       |
-| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                          |
-| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                               |
-| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                |
-| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                     |
-| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                       |
-| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                          |
-| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                               |
-| [cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                   |
-| [cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                        |
-| [cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                  |
-| [cloud.operators.datacatalog.CloudDataCatalogListTagsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                        |
-| [cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                     |
-| [cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                          |
-| [cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                   |
-| [cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                     |
-| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                       |
-| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                          |
-| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                               |
-| [cloud.operators.dataflow.DataflowStartFlexTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py)                                                             |
-| [cloud.operators.dataflow.DataflowStartSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py)                                                                   |
-| [cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
-| [cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
-| [cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
-| [cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
-| [cloud.operators.datafusion.CloudDataFusionGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                        |
-| [cloud.operators.datafusion.CloudDataFusionListPipelinesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                      |
-| [cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                    |
-| [cloud.operators.datafusion.CloudDataFusionStartPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                      |
-| [cloud.operators.datafusion.CloudDataFusionStopPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                       |
-| [cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
-| [cloud.operators.dataprep.DataprepGetJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py)                                                                   |
-| [cloud.operators.dataprep.DataprepGetJobsForJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py)                                                            |
-| [cloud.operators.dataprep.DataprepRunJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py)                                                                   |
-| [cloud.operators.dataproc.DataprocSubmitJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                     |
-| [cloud.operators.dataproc.DataprocUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                 |
-| [cloud.operators.datastore.CloudDatastoreAllocateIdsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                           |
-| [cloud.operators.datastore.CloudDatastoreBeginTransactionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                      |
-| [cloud.operators.datastore.CloudDatastoreCommitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                                |
-| [cloud.operators.datastore.CloudDatastoreDeleteOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                       |
-| [cloud.operators.datastore.CloudDatastoreGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                          |
-| [cloud.operators.datastore.CloudDatastoreRollbackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                              |
-| [cloud.operators.datastore.CloudDatastoreRunQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                              |
-| [cloud.operators.functions.CloudFunctionInvokeFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py)                                                         |
-| [cloud.operators.gcs.GCSDeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                 |
-| [cloud.operators.gcs.GCSFileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                |
-| [cloud.operators.gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                           |
-| [cloud.operators.life_sciences.LifeSciencesRunPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/life_sciences.py)                                                     |
-| [cloud.operators.mlengine.MLEngineCreateModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                   |
-| [cloud.operators.mlengine.MLEngineCreateVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                 |
-| [cloud.operators.mlengine.MLEngineDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                   |
-| [cloud.operators.mlengine.MLEngineDeleteVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                 |
-| [cloud.operators.mlengine.MLEngineGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                      |
-| [cloud.operators.mlengine.MLEngineListVersionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                  |
-| [cloud.operators.mlengine.MLEngineSetDefaultVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                             |
-| [cloud.operators.mlengine.MLEngineTrainingCancelJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                             |
-| [cloud.operators.pubsub.PubSubPullOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                                |
-| [cloud.operators.stackdriver.StackdriverDeleteAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                          |
-| [cloud.operators.stackdriver.StackdriverDeleteNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                            |
-| [cloud.operators.stackdriver.StackdriverDisableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                 |
-| [cloud.operators.stackdriver.StackdriverDisableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                          |
-| [cloud.operators.stackdriver.StackdriverEnableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                  |
-| [cloud.operators.stackdriver.StackdriverEnableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                           |
-| [cloud.operators.stackdriver.StackdriverListAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                    |
-| [cloud.operators.stackdriver.StackdriverListNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                             |
-| [cloud.operators.stackdriver.StackdriverUpsertAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                          |
-| [cloud.operators.stackdriver.StackdriverUpsertNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                            |
-| [cloud.operators.tasks.CloudTasksQueueCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                       |
-| [cloud.operators.tasks.CloudTasksQueueDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                       |
-| [cloud.operators.tasks.CloudTasksQueueGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                          |
-| [cloud.operators.tasks.CloudTasksQueuePauseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                        |
-| [cloud.operators.tasks.CloudTasksQueuePurgeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                        |
-| [cloud.operators.tasks.CloudTasksQueueResumeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                       |
-| [cloud.operators.tasks.CloudTasksQueueUpdateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                       |
-| [cloud.operators.tasks.CloudTasksQueuesListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                        |
-| [cloud.operators.tasks.CloudTasksTaskCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                        |
-| [cloud.operators.tasks.CloudTasksTaskDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                        |
-| [cloud.operators.tasks.CloudTasksTaskGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                           |
-| [cloud.operators.tasks.CloudTasksTaskRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                           |
-| [cloud.operators.tasks.CloudTasksTasksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                         |
-| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                         |
-| [cloud.operators.vision.CloudVisionDeleteReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                           |
-| [firebase.operators.firestore.CloudFirestoreExportDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/operators/firestore.py)                                                  |
-| [marketing_platform.operators.analytics.GoogleAnalyticsDataImportUploadOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                           |
-| [marketing_platform.operators.analytics.GoogleAnalyticsDeletePreviousDataUploadsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                  |
-| [marketing_platform.operators.analytics.GoogleAnalyticsGetAdsLinkOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                                 |
-| [marketing_platform.operators.analytics.GoogleAnalyticsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                               |
-| [marketing_platform.operators.analytics.GoogleAnalyticsModifyFileHeadersDataImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                |
-| [marketing_platform.operators.analytics.GoogleAnalyticsRetrieveAdsLinksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                       |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py)           |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py)         |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py)           |
-| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py)              |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)                 |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)        |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)                 |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)            |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)               |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360RunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)                    |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)                     |
-| [marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)              |
-| [marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py)                           |
-| [marketing_platform.operators.search_ads.GoogleSearchAdsInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py)                             |
-| [suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/sheets.py)                                                             |
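
All of the operators above live under the new `airflow.providers.google` package. As a minimal sketch of how one of them could be wired into a DAG (the DAG id, schedule, and query configuration below are placeholder values, not taken from the provider documentation), the `BigQueryInsertJobOperator` listed above might be used like this:

```python
# Illustrative sketch only: query and DAG settings are placeholders.
from airflow import DAG
from airflow.utils.dates import days_ago
from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

with DAG(
    dag_id="example_bigquery_insert_job",  # hypothetical DAG id
    start_date=days_ago(1),
    schedule_interval=None,
) as dag:
    insert_job = BigQueryInsertJobOperator(
        task_id="run_query",
        configuration={
            "query": {
                "query": "SELECT 1",  # placeholder query
                "useLegacySql": False,
            }
        },
    )
```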
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.google` package                                                                                                                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                                                   |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.operators.bigquery.BigQueryCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                       | [contrib.operators.bigquery_check_operator.BigQueryCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                                    |
-| [cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                          | [contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                   |
-| [cloud.operators.bigquery.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                            | [contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                     |
-| [cloud.operators.bigquery.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                         | [contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                  |
-| [cloud.operators.bigquery.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                        |
-| [cloud.operators.bigquery.BigQueryDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                 | [contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_table_delete_operator.py)                                |
-| [cloud.operators.bigquery.BigQueryExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                | [contrib.operators.bigquery_operator.BigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                                     |
-| [cloud.operators.bigquery.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                     | [contrib.operators.bigquery_get_data.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_get_data.py)                                                              |
-| [cloud.operators.bigquery.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                  | [contrib.operators.bigquery_operator.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                           |
-| [cloud.operators.bigquery.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                            | [contrib.operators.bigquery_operator.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                     |
-| [cloud.operators.bigquery.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                            |
-| [cloud.operators.bigquery.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                | [contrib.operators.bigquery_operator.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                         |
-| [cloud.operators.bigquery.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_operator.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                        |
-| [cloud.operators.bigquery.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                 | [contrib.operators.bigquery_operator.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                          |
-| [cloud.operators.bigquery.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                  | [contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                               |
-| [cloud.operators.bigtable.BigtableCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                              | [contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                               |
-| [cloud.operators.bigtable.BigtableCreateTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                                 | [contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                  |
-| [cloud.operators.bigtable.BigtableDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                              | [contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                               |
-| [cloud.operators.bigtable.BigtableDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                                 | [contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                  |
-| [cloud.operators.bigtable.BigtableUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                               | [contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                |
-| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py)                                                         | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py)                                          |
-| [cloud.operators.cloud_sql.CloudSQLBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                                      | [contrib.operators.gcp_sql_operator.CloudSqlBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                                   |
-| [cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                    | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                 |
-| [cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                            | [contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                         |
-| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                    | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                 |
-| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                            | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                         |
-| [cloud.operators.cloud_sql.CloudSQLExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                              | [contrib.operators.gcp_sql_operator.CloudSqlQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                                  |
-| [cloud.operators.cloud_sql.CloudSQLExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                            | [contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                         |
-| [cloud.operators.cloud_sql.CloudSQLImportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                            | [contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                         |
-| [cloud.operators.cloud_sql.CloudSQLInstancePatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                             | [contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                          |
-| [cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                     | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                  |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                    |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)    | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                       |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)  | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                     |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)  | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                     |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                    |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
-| [cloud.operators.compute.ComputeEngineBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                                     | [contrib.operators.gcp_compute_operator.GceBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                                |
-| [cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                     | [contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                |
-| [cloud.operators.compute.ComputeEngineInstanceGroupUpdateManagerTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                       | [contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                  |
-| [cloud.operators.compute.ComputeEngineSetMachineTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                           | [contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                      |
-| [cloud.operators.compute.ComputeEngineStartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                            | [contrib.operators.gcp_compute_operator.GceInstanceStartOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                       |
-| [cloud.operators.compute.ComputeEngineStopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                             | [contrib.operators.gcp_compute_operator.GceInstanceStopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                        |
-| [cloud.operators.dataflow.DataflowCreateJavaJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py)                                                               | [contrib.operators.dataflow_operator.DataFlowJavaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py)                                                                 |
-| [cloud.operators.dataflow.DataflowCreatePythonJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py)                                                             | [contrib.operators.dataflow_operator.DataFlowPythonOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py)                                                               |
-| [cloud.operators.dataflow.DataflowTemplatedJobStartOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py)                                                           | [contrib.operators.dataflow_operator.DataflowTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py)                                                             |
-| [cloud.operators.dataproc.DataprocCreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                               | [contrib.operators.dataproc_operator.DataprocClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                        |
-| [cloud.operators.dataproc.DataprocDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                               | [contrib.operators.dataproc_operator.DataprocClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                        |
-| [cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                           | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                    |
-| [cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                 | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                          |
-| [cloud.operators.dataproc.DataprocJobBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                     | [contrib.operators.dataproc_operator.DataProcJobBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                              |
-| [cloud.operators.dataproc.DataprocScaleClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                | [contrib.operators.dataproc_operator.DataprocClusterScaleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                         |
-| [cloud.operators.dataproc.DataprocSubmitHadoopJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                             | [contrib.operators.dataproc_operator.DataProcHadoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                               |
-| [cloud.operators.dataproc.DataprocSubmitHiveJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                               | [contrib.operators.dataproc_operator.DataProcHiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                                 |
-| [cloud.operators.dataproc.DataprocSubmitPigJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                | [contrib.operators.dataproc_operator.DataProcPigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                                  |
-| [cloud.operators.dataproc.DataprocSubmitPySparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                            | [contrib.operators.dataproc_operator.DataProcPySparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                              |
-| [cloud.operators.dataproc.DataprocSubmitSparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                              | [contrib.operators.dataproc_operator.DataProcSparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                                |
-| [cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                           | [contrib.operators.dataproc_operator.DataProcSparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py)                                                             |
-| [cloud.operators.datastore.CloudDatastoreExportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                      | [contrib.operators.datastore_export_operator.DatastoreExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_export_operator.py)                                              |
-| [cloud.operators.datastore.CloudDatastoreImportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py)                                                      | [contrib.operators.datastore_import_operator.DatastoreImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_import_operator.py)                                              |
-| [cloud.operators.dlp.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                          | [contrib.operators.gcp_dlp_operator.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                           |
-| [cloud.operators.dlp.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                          | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                           |
-| [cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                              | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                               |
-| [cloud.operators.dlp.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                 | [contrib.operators.gcp_dlp_operator.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                  |
-| [cloud.operators.dlp.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                      | [contrib.operators.gcp_dlp_operator.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                       |
-| [cloud.operators.dlp.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                  | [contrib.operators.gcp_dlp_operator.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                   |
-| [cloud.operators.dlp.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                     | [contrib.operators.gcp_dlp_operator.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                      |
-| [cloud.operators.dlp.CloudDLPDeleteDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                          | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                           |
-| [cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                              | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                               |
-| [cloud.operators.dlp.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                 | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                  |
-| [cloud.operators.dlp.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                      | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                       |
-| [cloud.operators.dlp.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                  | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                   |
-| [cloud.operators.dlp.CloudDLPGetDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                             | [contrib.operators.gcp_dlp_operator.CloudDLPGetDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                              |
-| [cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                      | [contrib.operators.gcp_dlp_operator.CloudDLPGetJobTripperOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                          |
-| [cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                 | [contrib.operators.gcp_dlp_operator.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                  |
-| [cloud.operators.dlp.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                    | [contrib.operators.gcp_dlp_operator.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                     |
-| [cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                     | [contrib.operators.gcp_dlp_operator.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                      |
-| [cloud.operators.dlp.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                        | [contrib.operators.gcp_dlp_operator.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                         |
-| [cloud.operators.dlp.CloudDLPListDLPJobsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                           | [contrib.operators.gcp_dlp_operator.CloudDLPListDlpJobsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                            |
-| [cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                               | [contrib.operators.gcp_dlp_operator.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                |
-| [cloud.operators.dlp.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                         | [contrib.operators.gcp_dlp_operator.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                          |
-| [cloud.operators.dlp.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                  | [contrib.operators.gcp_dlp_operator.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                   |
-| [cloud.operators.dlp.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                       | [contrib.operators.gcp_dlp_operator.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                        |
-| [cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                   | [contrib.operators.gcp_dlp_operator.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                    |
-| [cloud.operators.dlp.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                           | [contrib.operators.gcp_dlp_operator.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                            |
-| [cloud.operators.dlp.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                     | [contrib.operators.gcp_dlp_operator.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                      |
-| [cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                              | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                               |
-| [cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                 | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                  |
-| [cloud.operators.dlp.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                      | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                       |
-| [cloud.operators.dlp.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py)                                                                  | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py)                                                   |
-| [cloud.operators.functions.CloudFunctionDeleteFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py)                                                       | [contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py)                                                    |
-| [cloud.operators.functions.CloudFunctionDeployFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py)                                                       | [contrib.operators.gcp_function_operator.GcfFunctionDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py)                                                    |
-| [cloud.operators.gcs.GCSBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                       | [contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py)                                         |
-| [cloud.operators.gcs.GCSCreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                               | [contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_operator.py)                                                         |
-| [cloud.operators.gcs.GCSDeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                              | [contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_delete_operator.py)                                                 |
-| [cloud.operators.gcs.GCSListObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                | [contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_list_operator.py)                                                       |
-| [cloud.operators.gcs.GCSObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                       | [contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py)                                         |
-| [cloud.operators.kubernetes_engine.GKECreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                  | [contrib.operators.gcp_container_operator.GKEClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                   |
-| [cloud.operators.kubernetes_engine.GKEDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                  | [contrib.operators.gcp_container_operator.GKEClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                   |
-| [cloud.operators.kubernetes_engine.GKEStartPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                       | [contrib.operators.gcp_container_operator.GKEPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                             |
-| [cloud.operators.mlengine.MLEngineManageModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                 | [contrib.operators.mlengine_operator.MLEngineModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                                |
-| [cloud.operators.mlengine.MLEngineManageVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                               | [contrib.operators.mlengine_operator.MLEngineVersionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                              |
-| [cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                     | [contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                      |
-| [cloud.operators.mlengine.MLEngineStartTrainingJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                            | [contrib.operators.mlengine_operator.MLEngineTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                             |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                 | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                         |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                          | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                  |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                        |
-| [cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                    | [contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                            |
-| [cloud.operators.pubsub.PubSubCreateSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                | [contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                         |
-| [cloud.operators.pubsub.PubSubCreateTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                       | [contrib.operators.pubsub_operator.PubSubTopicCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                |
-| [cloud.operators.pubsub.PubSubDeleteSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                | [contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                         |
-| [cloud.operators.pubsub.PubSubDeleteTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                       | [contrib.operators.pubsub_operator.PubSubTopicDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                |
-| [cloud.operators.pubsub.PubSubPublishMessageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                    | [contrib.operators.pubsub_operator.PubSubPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                    |
-| [cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
-| [cloud.operators.spanner.SpannerDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                                 | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                             |
-| [cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
-| [cloud.operators.spanner.SpannerDeployInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                                 | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                             |
-| [cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                          | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                      |
-| [cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
-| [cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/speech_to_text.py)                                        | [contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_speech_to_text_operator.py)                           |
-| [cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/text_to_speech.py)                                             | [contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_text_to_speech_operator.py)                                |
-| [cloud.operators.translate.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate.py)                                                                | [contrib.operators.gcp_translate_operator.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_operator.py)                                                 |
-| [cloud.operators.translate_speech.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate_speech.py)                                                | [contrib.operators.gcp_translate_speech_operator.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_speech_operator.py)                                 |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                         | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py)          |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                          | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py)           |
-| [cloud.operators.vision.CloudVisionCreateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
-| [cloud.operators.vision.CloudVisionCreateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                             | [contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                              |
-| [cloud.operators.vision.CloudVisionCreateReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                         | [contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                          |
-| [cloud.operators.vision.CloudVisionDeleteProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
-| [cloud.operators.vision.CloudVisionDeleteProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                             | [contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                              |
-| [cloud.operators.vision.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                            | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                             |
-| [cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                        | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                         |
-| [cloud.operators.vision.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                   | [contrib.operators.gcp_vision_operator.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                    |
-| [cloud.operators.vision.CloudVisionGetProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                   | [contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                    |
-| [cloud.operators.vision.CloudVisionGetProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
-| [cloud.operators.vision.CloudVisionImageAnnotateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
-| [cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                  | [contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                   |
-| [cloud.operators.vision.CloudVisionTextDetectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                   | [contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                            |
-| [cloud.operators.vision.CloudVisionUpdateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
-| [cloud.operators.vision.CloudVisionUpdateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                             | [contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                              |
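-
-In DAG code the mapping above is just an import rename. A minimal sketch for one of the moved operators (`GKEPodOperator`, renamed to `GKEStartPodOperator`):
-
-```python
-# Airflow 1.10 import (deprecated contrib location)
-# from airflow.contrib.operators.gcp_container_operator import GKEPodOperator
-
-# Airflow 2.0 / providers import
-from airflow.providers.google.cloud.operators.kubernetes_engine import GKEStartPodOperator
-```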
-
-
-## Transfer operators
-
-
-### New transfer operators
-
-| New Airflow 2.0 transfers: `airflow.providers.google` package                                                                                                                          |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/transfers/ads_to_gcs.py)                                  |
-| [cloud.transfers.azure_fileshare_to_gcs.AzureFileShareToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py) |
-| [cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py)    |
-| [cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_local.py)                    |
-| [cloud.transfers.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_sftp.py)                                 |
-| [cloud.transfers.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/presto_to_gcs.py)                           |
-| [cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py)               |
-| [cloud.transfers.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sftp_to_gcs.py)                                 |
-| [cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sheets_to_gcs.py)                     |
-| [suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_sheets.py)                     |
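-
-These transfers are new in the `airflow.providers.google` package and have no 1.10 counterpart. A minimal sketch of wiring one of them into a DAG; the keyword arguments are illustrative assumptions, so check the operator's docstring in your installed provider version:
-
-```python
-from airflow import DAG
-from airflow.utils.dates import days_ago
-from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
-
-with DAG("example_gcs_to_local", start_date=days_ago(1), schedule_interval=None) as dag:
-    # Bucket and object values are placeholders; the parameter names are
-    # assumptions to verify against the operator's signature.
-    download_report = GCSToLocalFilesystemOperator(
-        task_id="download_report",
-        bucket="my-bucket",
-        object_name="reports/latest.csv",
-        filename="/tmp/latest.csv",
-    )
-```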
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.google` package                                                                                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                 |
-|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.transfers.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/adls_to_gcs.py)                            | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py)                               |
-| [cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py) | [contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_bigquery.py)                   |
-| [cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py)                | [contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_gcs.py)                         |
-| [cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py)          | [contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_mysql_operator.py)          |
-| [cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py)             | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py)                |
-| [cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py)                | [contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_bq.py)                               |
-| [cloud.transfers.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_gcs.py)                               | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py)                   |
-| [cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/local_to_gcs.py)               | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py)                               |
-| [cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mssql_to_gcs.py)                         | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py)                            |
-| [cloud.transfers.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mysql_to_gcs.py)                         | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py)                            |
-| [cloud.transfers.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/postgres_to_gcs.py)                | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) |
-| [cloud.transfers.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/s3_to_gcs.py)                                  | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py)                                  |
-| [cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sql_to_gcs.py)                           | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py)                              |
-| [suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_gdrive.py)                 | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py)                 |
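-
-As with the operators, migrating a moved transfer is an import rename; a minimal sketch for the GCS-to-GCS copy operator from the table above:
-
-```python
-# Airflow 1.10 import (deprecated contrib location)
-# from airflow.contrib.operators.gcs_to_gcs import GoogleCloudStorageToGoogleCloudStorageOperator
-
-# Airflow 2.0 / providers import
-from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
-```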
-
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.google` package                                                                                                                                                       |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py)                                                  |
-| [cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery_dts.py)                                   |
-| [cloud.sensors.dataflow.DataflowJobAutoScalingEventsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py)                                                     |
-| [cloud.sensors.dataflow.DataflowJobMessagesSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py)                                                              |
-| [cloud.sensors.dataflow.DataflowJobMetricsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py)                                                               |
-| [cloud.sensors.dataflow.DataflowJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py)                                                                |
-| [cloud.sensors.dataproc.DataprocJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataproc.py)                                                                      |
-| [marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/campaign_manager.py)            |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py) |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360ReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py)                  |
-| [marketing_platform.sensors.search_ads.GoogleSearchAdsReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/search_ads.py)                              |
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.google` package                                                                                                                                                        | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                            |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTableExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py)                                                        | [contrib.sensors.bigquery_sensor.BigQueryTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/bigquery_sensor.py)                                   |
-| [cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigtable.py)                                             | [contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) |
-| [cloud.sensors.cloud_storage_transfer_service.CloudDataTransferServiceJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py) | [contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcp_transfer_sensor.py)      |
-| [cloud.sensors.gcs.GCSObjectExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py)                                                                      | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py)                                  |
-| [cloud.sensors.gcs.GCSObjectUpdateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py)                                                                         | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectUpdatedSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py)                           |
-| [cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py)                                                           | [contrib.sensors.gcs_sensor.GoogleCloudStoragePrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py)                                  |
-| [cloud.sensors.gcs.GCSUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py)                                                                | [contrib.sensors.gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py)                   |
-| [cloud.sensors.pubsub.PubSubPullSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/pubsub.py)                                                                        | [contrib.sensors.pubsub_sensor.PubSubPullSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/pubsub_sensor.py)                                          |
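-
-A minimal sketch of migrating one of the moved sensors (`GoogleCloudStorageObjectSensor`, renamed to `GCSObjectExistenceSensor`); the keyword arguments are illustrative assumptions, so verify them against the sensor's docstring:
-
-```python
-# Airflow 1.10 import (deprecated contrib location)
-# from airflow.contrib.sensors.gcs_sensor import GoogleCloudStorageObjectSensor
-
-# Airflow 2.0 / providers import
-from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor
-
-# Inside a `with DAG(...)` block; bucket/object values are placeholders.
-wait_for_report = GCSObjectExistenceSensor(
-    task_id="wait_for_report",
-    bucket="my-bucket",
-    object="reports/latest.csv",
-)
-```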
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.google` package                                                                                                                                  |
-|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.hooks.ads.GoogleAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/hooks/ads.py)                                                                     |
-| [cloud.hooks.automl.CloudAutoMLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/automl.py)                                                         |
-| [cloud.hooks.bigquery_dts.BiqQueryDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery_dts.py)                             |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py)                              |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py)                     |
-| [cloud.hooks.compute_ssh.ComputeEngineSSHHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute_ssh.py)                                          |
-| [cloud.hooks.datacatalog.CloudDataCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datacatalog.py)                                          |
-| [cloud.hooks.datafusion.DataFusionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datafusion.py)                                                  |
-| [cloud.hooks.dataprep.GoogleDataprepHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataprep.py)                                                  |
-| [cloud.hooks.gdm.GoogleDeploymentManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gdm.py)                                                   |
-| [cloud.hooks.life_sciences.LifeSciencesHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/life_sciences.py)                                          |
-| [cloud.hooks.os_login.OSLoginHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/os_login.py)                                                         |
-| [cloud.hooks.secret_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/secret_manager.py)                                      |
-| [cloud.hooks.stackdriver.StackdriverHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/stackdriver.py)                                               |
-| [common.hooks.discovery_api.GoogleDiscoveryApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/discovery_api.py)                                  |
-| [firebase.hooks.firestore.CloudFirestoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/hooks/firestore.py)                                          |
-| [marketing_platform.hooks.analytics.GoogleAnalyticsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/analytics.py)                     |
-| [marketing_platform.hooks.campaign_manager.GoogleCampaignManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/campaign_manager.py) |
-| [marketing_platform.hooks.display_video.GoogleDisplayVideo360Hook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/display_video.py)       |
-| [marketing_platform.hooks.search_ads.GoogleSearchAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/search_ads.py)                   |
-| [suite.hooks.sheets.GSheetsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/sheets.py)                                                             |
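-
-New hooks likewise have no contrib counterpart; they are instantiated from the provider package and, like the other Google hooks, usually accept a `gcp_conn_id` argument (an assumption to confirm for the specific hook), for example:
-
-```python
-from airflow.providers.google.suite.hooks.sheets import GSheetsHook
-
-# The connection id points at the standard Google Cloud connection; the exact
-# constructor arguments may differ per hook, so check its docstring.
-sheets_hook = GSheetsHook(gcp_conn_id="google_cloud_default")
-```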
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.google` package                                                                                                                                           | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                     |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.hooks.bigquery.BigQueryHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery.py)                                                             | [contrib.hooks.bigquery_hook.BigQueryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/bigquery_hook.py)                                           |
-| [cloud.hooks.bigtable.BigtableHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigtable.py)                                                             | [contrib.hooks.gcp_bigtable_hook.BigtableHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_bigtable_hook.py)                                   |
-| [cloud.hooks.cloud_build.CloudBuildHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_build.py)                                                     | [contrib.hooks.gcp_cloud_build_hook.CloudBuildHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_cloud_build_hook.py)                           |
-| [cloud.hooks.cloud_sql.CloudSQLDatabaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py)                                                   | [contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py)                                     |
-| [cloud.hooks.cloud_sql.CloudSQLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py)                                                           | [contrib.hooks.gcp_sql_hook.CloudSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py)                                             |
-| [cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py) | [contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_transfer_hook.py)                         |
-| [cloud.hooks.compute.ComputeEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute.py)                                                          | [contrib.hooks.gcp_compute_hook.GceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_compute_hook.py)                                          |
-| [cloud.hooks.dataflow.DataflowHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataflow.py)                                                             | [contrib.hooks.gcp_dataflow_hook.DataFlowHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataflow_hook.py)                                   |
-| [cloud.hooks.dataproc.DataprocHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataproc.py)                                                             | [contrib.hooks.gcp_dataproc_hook.DataProcHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataproc_hook.py)                                   |
-| [cloud.hooks.datastore.DatastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datastore.py)                                                          | [contrib.hooks.datastore_hook.DatastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datastore_hook.py)                                        |
-| [cloud.hooks.dlp.CloudDLPHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dlp.py)                                                                       | [contrib.hooks.gcp_dlp_hook.CloudDLPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dlp_hook.py)                                             |
-| [cloud.hooks.functions.CloudFunctionsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/functions.py)                                                     | [contrib.hooks.gcp_function_hook.GcfHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_function_hook.py)                                        |
-| [cloud.hooks.gcs.GCSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gcs.py)                                                                            | [contrib.hooks.gcs_hook.GoogleCloudStorageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcs_hook.py)                                           |
-| [cloud.hooks.kms.CloudKMSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kms.py)                                                                       | [contrib.hooks.gcp_kms_hook.GoogleCloudKMSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_kms_hook.py)                                       |
-| [cloud.hooks.kubernetes_engine.GKEHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kubernetes_engine.py)                                                | [contrib.hooks.gcp_container_hook.GKEClusterHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_container_hook.py)                               |
-| [cloud.hooks.mlengine.MLEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/mlengine.py)                                                             | [contrib.hooks.gcp_mlengine_hook.MLEngineHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_mlengine_hook.py)                                   |
-| [cloud.hooks.natural_language.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/natural_language.py)                                 | [contrib.hooks.gcp_natural_language_hook.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_natural_language_hook.py)       |
-| [cloud.hooks.pubsub.PubSubHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/pubsub.py)                                                                   | [contrib.hooks.gcp_pubsub_hook.PubSubHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_pubsub_hook.py)                                         |
-| [cloud.hooks.spanner.SpannerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/spanner.py)                                                                | [contrib.hooks.gcp_spanner_hook.CloudSpannerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_spanner_hook.py)                                 |
-| [cloud.hooks.speech_to_text.CloudSpeechToTextHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/speech_to_text.py)                                        | [contrib.hooks.gcp_speech_to_text_hook.GCPSpeechToTextHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_speech_to_text_hook.py)                |
-| [cloud.hooks.tasks.CloudTasksHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/tasks.py)                                                                 | [contrib.hooks.gcp_tasks_hook.CloudTasksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_tasks_hook.py)                                       |
-| [cloud.hooks.text_to_speech.CloudTextToSpeechHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/text_to_speech.py)                                        | [contrib.hooks.gcp_text_to_speech_hook.GCPTextToSpeechHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_text_to_speech_hook.py)                |
-| [cloud.hooks.translate.CloudTranslateHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/translate.py)                                                     | [contrib.hooks.gcp_translate_hook.CloudTranslateHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_translate_hook.py)                           |
-| [cloud.hooks.video_intelligence.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/video_intelligence.py)                           | [contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_video_intelligence_hook.py) |
-| [cloud.hooks.vision.CloudVisionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/vision.py)                                                              | [contrib.hooks.gcp_vision_hook.CloudVisionHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_vision_hook.py)                                    |
-| [common.hooks.base_google.GoogleBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/base_google.py)                                                   | [contrib.hooks.gcp_api_base_hook.GoogleBaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_api_base_hook.py)                                 |
-| [suite.hooks.drive.GoogleDriveHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/drive.py)                                                                | [contrib.hooks.gdrive_hook.GoogleDriveHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gdrive_hook.py)                                            |
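-
-Migrating existing DAG code generally only requires updating the import path (and, where applicable, the class name) according to the mapping above. A minimal sketch for the GCS hook, assuming the standard `gcp_conn_id` argument and an illustrative connection id:
-
-```python
-# Airflow 1.10.*: deprecated contrib location
-# from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
-
-# Airflow 2.0: provider package location (class renamed to GCSHook)
-from airflow.providers.google.cloud.hooks.gcs import GCSHook
-
-hook = GCSHook(gcp_conn_id="google_cloud_default")
-```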
-
-
-## Secrets
-
-
-
-### Moved secrets
-
-| Airflow 2.0 secrets: `airflow.providers.google` package                                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                         |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.secrets.secret_manager.CloudSecretManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/secrets/secret_manager.py) | [contrib.secrets.gcp_secrets_manager.CloudSecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/gcp_secrets_manager.py) |
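-
-A secrets backend is normally referenced by its dotted class path, for example via the `backend` option in the `[secrets]` section of `airflow.cfg`, so that path needs to be updated when moving to the provider package; note that the class name also changed from `CloudSecretsManagerBackend` to `CloudSecretManagerBackend`. A minimal sketch of the new import path:
-
-```python
-# Airflow 1.10.*: deprecated contrib location
-# from airflow.contrib.secrets.gcp_secrets_manager import CloudSecretsManagerBackend
-
-# Airflow 2.0: provider package location
-from airflow.providers.google.cloud.secrets.secret_manager import CloudSecretManagerBackend
-```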
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05  | Add support for extra links coming from the providers (#12472)                                                                                                     |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29  | Don't use time.time() or timezone.utcnow() for duration calculations (#12353)                                                                                          |
-| [76bcd08dc](https://github.com/apache/airflow/commit/76bcd08dcae8d62307f5e9b8c2e182b54ed22a27) | 2020-11-28  | Added `@apply_defaults` decorator. (#12620)                                                                                                                        |
-| [e1ebfa68b](https://github.com/apache/airflow/commit/e1ebfa68b109b5993c47891cfd0b9b7e46b6d770) | 2020-11-27  | Add DataflowJobMessagesSensor and DataflowAutoscalingEventsSensor (#12249)                                                                                         |
-| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24  | Add check for duplicates in provider.yaml files (#12578)                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [9e3b2c554](https://github.com/apache/airflow/commit/9e3b2c554dadf58972198e4e16f15af2f15ec37a) | 2020-11-19  | GCP Secrets Optional Lookup (#12360)                                                                                                                               |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18  | Fix download method in GCSToBigQueryOperator (#12442)                                                                                                              |
-| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17  | Adds mechanism for provider package discovery. (#12383)                                                                                                            |
-| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17  | Add missing pre-commit definition - provider-yamls (#12393)                                                                                                        |
-| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17  | Add Dataflow sensors - job metrics (#12039)                                                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16  | Add provide_file_and_upload to GCSHook (#12310)                                                                                                                    |
-| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15  | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726)                                                                                    |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12  | Fixes the sending of an empty list to BigQuery `list_rows` (#12307)                                                                                                |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10  | Fix spelling in Python files (#12230)                                                                                                                              |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10  | Enable Markdownlint rule - MD022/blanks-around-headings (#12225)                                                                                                   |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10  | Simplify string expressions & Use f-string (#12216)                                                                                                                    |
-| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10  | Add Compute Engine SSH hook (#9879)                                                                                                                                |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09  | Provider's readmes generated for elasticsearch and google packages (#12194)                                                                                            |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08  | Add authentication to AWS with Google credentials (#12079)                                                                                                         |
-| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08  | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172)                                                            |
-| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06  | Dataflow - add waiting for successful job cancel (#11501)                                                                                                          |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06  | Simplify string expressions (#12123)                                                                                                                               |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04  | Format all files (without excepions) by black (#12091)                                                                                                             |
-| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04  | Add server side cursor support for postgres to GCS operator (#11793)                                                                                               |
-| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04  | Add DataflowStartSQLQuery operator (#8553)                                                                                                                         |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04  | Add template fields renderers to Biguery and Dataproc operators (#12067)                                                                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                                                                                                                  |
-| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03  | Log BigQuery job id in insert method of BigQueryHook (#12056)                                                                                                      |
-| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03  | Add job name and progress logs to Cloud Storage Transfer Hook (#12014)                                                                                             |
-| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02  | Improve handling server errors in DataprocSubmitJobOperator (#11947)                                                                                               |
-| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30  | Add SalesforceToGcsOperator (#10760)                                                                                                                               |
-| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29  | Add drain option when canceling Dataflow pipelines (#11374)                                                                                                        |
-| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29  | The PRs which are not approved run subset of tests (#11828)                                                                                                        |
-| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28  | Fixing re pattern and changing to use a single character class. (#11857)                                                                                           |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26  | Google Memcached hooks - improve protobuf messages handling (#11743)                                                                                               |
-| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26  | Fix spellings (#11825)                                                                                                                                             |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24  | Fix spelling (#11821)                                                                                                                                              |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24  | Use Python 3 style super classes (#11806)                                                                                                                          |
-| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22  | Improve Cloud Memorystore for Redis example (#11735)                                                                                                               |
-| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22  | Fix static checks after merging #10121 (#11737)                                                                                                                    |
-| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22  | Add Google Cloud Memorystore Memcached Operators (#10121)                                                                                                          |
-| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21  | Retry requests in case of error in Google ML Engine Hook (#11712)                                                                                                  |
-| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21  | Fix doc errors in google provider files. (#11713)                                                                                                                  |
-| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21  | Enforce strict rules for yamllint (#11709)                                                                                                                         |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19  | Add service_account to Google ML Engine operator (#11619)                                                                                                          |
-| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18  | docs: Update Bigquery clustering docstrings (#11232)                                                                                                               |
-| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18  | Strict type checking for provider Google (#11609)                                                                                                                  |
-| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16  | Add type annotations for AWS operators and hooks (#11434)                                                                                                          |
-| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16  | Add DataflowStartFlexTemplateOperator (#8550)                                                                                                                      |
-| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16  | Strict type checking for provider google cloud  (#11548)                                                                                                           |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12  | Google cloud operator strict type check (#11450)                                                                                                                   |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12  | Remove redundant None provided as default to dict.get() (#11448)                                                                                                   |
-| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11  | Strict type check for google ads and cloud hooks (#11390)                                                                                                          |
-| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11  | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409)                                                                                         |
-| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10  | Remove redundant parentheses from Python files (#10967)                                                                                                            |
-| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09  | Fix regression in DataflowTemplatedJobStartOperator (#11167)                                                                                                       |
-| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07  | Add AzureFileShareToGCSOperator (#10991)                                                                                                                           |
-| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07  | Improve handling of job_id in BigQuery operators (#11287)                                                                                                          |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27  | Add example DAG and system test for MySQLToGCSOperator (#10990)                                                                                                    |
-| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25  | Fix incorrect Usage of Optional[str] & Optional[int] (#11141)                                                                                                          |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24  | Fix incorrect Usage of Optional[bool] (#11138)                                                                                                                     |
-| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23  | Add template fields renderers for better UI rendering (#11061)                                                                                                     |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22  | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024)                                                                                    |
-| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16  | Add example dag and system test for S3ToGCSOperator (#10951)                                                                                                       |
-| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16  | Fix more docs spellings (#10965)                                                                                                                                   |
-| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13  | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869)                                                                                         |
-| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11  | Add on_kill method to BigQueryInsertJobOperator (#10866)                                                                                                           |
-| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11  | fix typo in firebase/example_filestore DAG (#10875)                                                                                                                |
-| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10  | Add on_kill method to DataprocSubmitJobOperator (#10847)                                                                                                           |
-| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10  | Fix and remove some more typos from spelling_wordlist.txt (#10845)                                                                                                 |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08  | Extract missing gcs_to_local example DAG from gcs example (#10767)                                                                                                 |
-| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08  | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772)                                                                       |
-| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07  | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773)                                                                                          |
-| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07  | Refactor DataprocCreateCluster operator to use simpler interface (#10403)                                                                                          |
-| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05  | Asynchronous execution of Dataproc jobs with a Sensor (#10673)                                                                                                     |
-| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04  | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578)                                                                                            |
-| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01  | Add Dataprep operators (#10304)                                                                                                                                    |
-| [11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30  | Fix typos: duplicated "the" (#10647)                                                                                                                                   |
-| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29  | Update Google Cloud branding (#10642)                                                                                                                              |
-| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28  | Fix broken master - DLP (#10635)                                                                                                                                   |
-| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28  | Fix Google DLP example and improve ops idempotency (#10608)                                                                                                        |
-| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28  | Update Google Cloud branding (#10615)                                                                                                                              |
-| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27  | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651)                                                                                                |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25  | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)                                                                                             |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25  | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)                                                                                              |
-| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25  | Fix typo in "Cloud" (#10534)                                                                                                                                           |
-| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24  | Fix typo in PostgresHook (#10529)                                                                                                                                  |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                                                                                                         |
-| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24  | Implement impersonation in google operators (#10052)                                                                                                               |
-| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24  | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475)                                                                       |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22  | Fix typo in timed_out (#10459)                                                                                                                                     |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22  | Replace assigment with Augmented assignment (#10468)                                                                                                               |
-| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21  | Dataflow operators don't not always create a virtualenv (#10373)                                                                                                       |
-| [083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18  | Simplified GCSTaskHandler configuration (#10365)                                                                                                                   |
-| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17  | Add test for GCSTaskHandler (#9600) (#9861)                                                                                                                        |
-| [e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16  | Add type annotations for mlengine_operator_utils (#10297)                                                                                                          |
-| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16  | Add Bigtable Update Instance Hook/Operator (#10340)                                                                                                                |
-| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15  | CI: Fix failing docs-build (#10342)                                                                                                                                |
-| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15  | Improve idempotency of BigQueryInsertJobOperator (#9590)                                                                                                           |
-| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14  | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326)                                                                            |
-| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13  | Implement Google BigQuery Table Partition Sensor (#10218)                                                                                                          |
-| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12  | Enable Sphinx spellcheck for doc generation (#10280)                                                                                                               |
-| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12  | DbApiHook: Support kwargs in get_pandas_df (#9730)                                                                                                                 |
-| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09  | Added DataprepGetJobsForJobGroupOperator (#10246)                                                                                                                  |
-| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09  | Fix various typos in the repo (#10263)                                                                                                                             |
-| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08  | Add labels param to Google MLEngine Operators (#10222)                                                                                                             |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06  | Update guide for Google Cloud Secret Manager Backend (#10172)                                                                                                      |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06  | Improve handling Dataproc cluster creation with ERROR state (#9593)                                                                                                |
-| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04  | Add correct signatures for operators in google provider package (#10144)                                                                                           |
-| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03  | Add additional Cloud Datastore operators (#10032)                                                                                                                  |
-| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03  | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031)                                                                                                  |
-| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02  | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117)                                                                                      |
-| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02  | Add missing params to GCP Pub/Sub creation_subscription (#10106)                                                                                                   |
-| [b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02  | Fix sensor not providing arguments for GCSHook (#10074)                                                                                                            |
-| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02  | Fix hook not passing gcp_conn_id to base class (#10075)                                                                                                            |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31  | Split Display Video 360 example into smaler DAGs (#10077)                                                                                                          |
-| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29  | Fix docstrings in BigQueryGetDataOperator (#10042)                                                                                                                 |
-| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27  | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25  | Stop using start_date in default_args in example_dags (#9982)                                                                                                      |
-| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23  | Add more information about using GoogleAdsHook (#9951)                                                                                                             |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22  | Add Google Authentication for experimental API (#9848)                                                                                                             |
-| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22  | More strict rules in mypy (#9705) (#9906)                                                                                                                          |
-| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22  | Fix calling `get_client` in BigQueryHook.table_exists (#9916)                                                                                                      |
-| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22  | Add support for impersonation in GCP hooks (#9915)                                                                                                                 |
-| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21  | Fix insert_job method of BigQueryHook (#9899)                                                                                                                      |
-| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21  | Remove type hint causing DeprecationWarning in Firestore operators (#9819)                                                                                         |
-| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16  | Fix typo in datafusion operator (#9859)                                                                                                                            |
-| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15  | Change DAG.clear to take dag_run_state (#9824)                                                                                                                     |
-| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15  | Add guide for AI Platform (previously Machine Learning Engine) Operators  (#9798)                                                                                  |
-| [770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15  | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832)                                                                                            |
-| [2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15  | Add CloudVisionDeleteReferenceImageOperator  (#9698)                                                                                                               |
-| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15  | Add Google Deployment Manager Hook (#9159)                                                                                                                         |
-| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14  | Allow `replace` flag in gcs_to_gcs operator. (#9667)                                                                                                               |
-| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13  | Keep functions signatures in decorators (#9786)                                                                                                                    |
-| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13  | Add multiple file upload functionality to GCS hook (#8849)                                                                                                         |
-| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12  | Add Google Stackdriver link (#9765)                                                                                                                                |
-| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11  | Fix StackdriverTaskHandler + add system tests (#9761)                                                                                                              |
-| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09  | Update example DAG for AI Platform operators (#9727)                                                                                                               |
-| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08  | Move gcs & wasb task handlers to their respective provider packages (#9714)                                                                                        |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                                                                                                        |
-| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06  | Move provider's log task handlers to the provider package (#9604)                                                                                                  |
-| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02  | Fix using .json template extension in GMP operators (#9566)                                                                                                        |
-| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30  | Extend BigQuery example with include clause (#9572)                                                                                                                |
-| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30  | Add template_ext to BigQueryInsertJobOperator (#9568)                                                                                                              |
-| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29  | Remove almost all references to airflow.contrib (#9559)                                                                                                            |
-| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27  | Bump Pylint to 2.5.3 (#9294)                                                                                                                                       |
-| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26  | nitpick fix (#9527)                                                                                                                                                |
-| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25  | Use literal syntax instead of function calls to create data structure (#9516)                                                                                      |
-| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22  | Pylint fixes and deprecation of rare used methods in Connection (#9419)                                                                                            |
-| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21  | Enable & Fix Whitespace related PyDocStyle Checks (#9458)                                                                                                          |
-| [5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19  | Don't use connection to store task handler credentials (#9381)                                                                                                     |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19  | Properly propagated warnings in operators (#9348)                                                                                                                  |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18  | Adds GCP Secret Manager Hook (#9368)                                                                                                                               |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                             |
-| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16  | Add support for latest Apache Beam SDK in Dataflow operators (#9323)                                                                                               |
-| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15  | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314)                                                                                           |
-| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15  | Resolve upstream tasks when template field is XComArg (#8805)                                                                                                      |
-| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15  | Wait for pipeline state in Data Fusion operators (#8954)                                                                                                           |
-| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10  | Add test for BQ operations using location (#9206)                                                                                                                  |
-| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10  | Make generated job_id more informative in BQ insert_job (#9203)                                                                                                    |
-| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10  | Upgrade pendulum to latest major version ~2.0 (#9184)                                                                                                              |
-| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09  | Allows using private endpoints in GKEStartPodOperator (#9169)                                                                                                      |
-| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05  | Add 3.8 to the test matrices (#8836)                                                                                                                               |
-| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05  | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)                                                                                    |
-| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05  | [AIRFLOW-6290] Create guide for GKE operators (#8883)                                                                                                              |
-| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04  | Fix sql_to_gcs hook gzip of schema_file (#9140)                                                                                                                    |
-| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
-| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01  | Add BigQueryInsertJobOperator (#8868)                                                                                                                              |
-| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31  | Create guide for Dataproc Operators (#9037)                                                                                                                        |
-| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29  | Add example dag and system test for LocalFilesystemToGCSOperator (#9043)                                                                                           |
-| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29  | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066)                                                                                   |
-| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29  | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055)                                                                                      |
-| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29  | add example dag and system test for GoogleSheetsToGCSOperator (#9056)                                                                                              |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                                                                                                     |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26  | Refactor BigQuery operators (#8858)                                                                                                                                |
-| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
-| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23  | Support YAML input for CloudBuildCreateOperator (#8808)                                                                                                            |
-| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19  | [AIRFLOW-6586] Improvements to gcs sensor (#7197)                                                                                                                  |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19  | Allow setting the pooling time in DLPHook (#8824)                                                                                                                  |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16  | Check for same task instead of Equality to detect Duplicate Tasks (#8828)                                                                                          |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13  | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816)                                                                                                |
-| [8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12  | Refactor BigQuery hook methods to use python library (#8631)                                                                                                       |
-| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12  | Fix template fields in Google operators (#8840)                                                                                                                    |
-| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12  | Fix Flake8 errors (#8841)                                                                                                                                          |
-| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12  | Refactor BigQuery check operators (#8813)                                                                                                                          |
-| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10  | Add separate example DAGs and system tests for google cloud speech (#8778)                                                                                         |
-| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10  | Added Upload Multiple Entity Read Files to specified big query dataset (#8610)                                                                                     |
-| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10  | Correctly restore upstream_task_ids when deserializing Operators (#8775)                                                                                           |
-| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08  | Added SDFtoGCSOperator (#8740)                                                                                                                                     |
-| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07  | Add documentation for SpannerDeployInstanceOperator (#8750)                                                                                                        |
-| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06  | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531)                                                                             |
-| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05  | Support num_retries field in env var for GCP connection (#8700)                                                                                                    |
-| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04  | Add system test for gcs_to_bigquery (#8556)                                                                                                                        |
-| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03  | Add system test and docs for Facebook Ads operators (#8503)                                                                                                        |
-| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30  | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625)                                                                                         |
-| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28  | Split and improve BigQuery example DAG (#8529)                                                                                                                     |
-| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28  | Refactor BigQueryHook dataset operations (#8477)                                                                                                                   |
-| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26  | Improve idempotency in CloudDataTransferServiceCreateJobOperator (#8430)                                                                                           |
-| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26  | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442)                                                                                              |
-| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25  | Add hook and operator for Google Cloud Life Sciences (#8481)                                                                                                       |
-| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23  | Pass location using parameter in Dataflow integration (#8382)                                                                                                      |
-| [912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23  | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174)                                                                                                       |
-| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22  | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377)                                                                                   |
-| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20  | Allow multiple extra_packages in Dataflow (#8394)                                                                                                                  |
-| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18  | Added location parameter to BigQueryCheckOperator (#8273)                                                                                                          |
-| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17  | Clean up temporary files in Dataflow operators (#8313)                                                                                                             |
-| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16  | Add Dataproc SparkR Example (#8240)                                                                                                                                |
-| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15  | Create guide for BigQuery operators (#8276)                                                                                                                        |
-| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14  | Raise exception when GCP credential doesn't support account impersonation (#8213)                                                                                  |
-| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14  | Added Facebook Ads Operator #7887 (#8008)                                                                                                                          |
-| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14  | fixed typo (#8294)                                                                                                                                                 |
-| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13  | Less aggressive eager upgrade of requirements (#8267)                                                                                                              |
-| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13  | Add mypy plugin for decorators. (#8145)                                                                                                                            |
-| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13  | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216)                                                                                                         |
-| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10  | Honor schema type for MySQL to GCS data pre-process (#8090)                                                                                                        |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)                                                                                                   |
-| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06  | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849)                                                                        |
-| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03  | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049)                                                                                         |
-| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02  | Add Google Ads list accounts operator (#8007)                                                                                                                      |
-| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01  | Unify Google class/package names (#8033)                                                                                                                           |
-| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31  | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011)                                                                                        |
-| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31  | Add more refactor steps for providers.google (#8010)                                                                                                               |
-| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31  | Individual package READMEs (#8012)                                                                                                                                 |
-| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30  | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743)                                                                                          |
-| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30  | Improve system tests for Cloud Build (#8003)                                                                                                                       |
-| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29  | Remove GKEStartPodOperator when backporting (#7908)                                                                                                                |
-| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28  | Get Airflow Variables from GCP Secrets Manager (#7946)                                                                                                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                                                                                                                 |
-| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28  | Add download/upload operators for GCS and Google Sheets (#7866)                                                                                                    |
-| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26  | Change signature of GSheetsHook methods (#7853)                                                                                                                    |
-| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26  | Improve idempotency in MLEngineHook.create_model (#7811)                                                                                                           |
-| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26  | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861)                                                                                                  |
-| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26  | Improve setUp/tearDown in Cloud Firestore system test (#7862)                                                                                                      |
-| [8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26  | Improve example DAGs for Cloud Memorystore (#7855)                                                                                                                 |
-| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26  | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856)                                                                                               |
-| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26  | Improve authorization in GCP system tests (#7863)                                                                                                                  |
-| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26  | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869)                                                                                          |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                                                                                                      |
-| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24  | Run Dataflow for ML Engine summary in venv (#7809)                                                                                                                 |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                                                                                                     |
-| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23  | Improve Google PubSub hook publish method (#7831)                                                                                                                  |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23  | Improve example DAG for ML Engine (#7810)                                                                                                                          |
-| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23  | Add call to Super class in 'google' providers (#7823)                                                                                                              |
-| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23  | Fix typo in GCP credentials_provider's docstring (#7818)                                                                                                           |
-| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23  | Add missing docstring in BigQueryHook.create_empty_table (#7817)                                                                                                   |
-| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23  | Improve support for latest API in MLEngineStartTrainingJobOperator (#7812)                                                                                         |
-| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23  | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795)                                                                                                  |
-| [27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22  | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794)                                                                                              |
-| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22  | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791)                                                                             |
-| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20  | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781)                                                                                                       |
-| [5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20  | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748)                                                                                             |
-| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20  | [AIRFLOW-6978] Add PubSubPullOperator (#7766)                                                                                                                      |
-| [6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20  | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692)                                                                                                |
-| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19  | [AIRFLOW-7069] Fix cloudsql system tests (#7770)                                                                                                                   |
-| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19  | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756)                                                                                          |
-| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19  | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759)                                                                                              |
-| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19  | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762)                                                                                                      |
-| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19  | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760)                                                                                                    |
-| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18  | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718)                                                                                                         |
-| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18  | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725)                                                                                                    |
-| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18  | [AIRFLOW-7081] Remove env variables from GCP guide (#7755)                                                                                                         |
-| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18  | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728)                                                                         |
-| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17  | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750)                                                                                           |
-| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17  | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475)                                                                                   |
-| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17  | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738)                                                                                       |
-| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16  | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354)                                                                                                |
-| [2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14  | [AIRFLOW-7055] Verbose logging option for google provider (#7711)                                                                                                  |
-| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13  | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630)                                                                                         |
-| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12  | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685)                                                                                     |
-| [1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10  | [AIRFLOW-6980] Improve system tests and building providers package (#7615)                                                                                         |
-| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09  | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664)                                                                                                     |
-| [e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09  | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659)                                                                                         |
-| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09  | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666)                                                                                                       |
-| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07  | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)                                                                                                   |
-| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07  | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624)                                                                                               |
-| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06  | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631)                                                                                          |
-| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05  | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535)                                                                           |
-| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04  | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609)                                                                                                     |
-| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04  | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612)                                                                                                                |
-| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04  | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547)                                                                                     |
-| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04  | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604)                                                                                                 |
-| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03  | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607)                                                                                 |
-| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29  | [AIRFLOW-6924] Fix Google DLP operators return types (#7546)                                                                                                       |
-| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27  | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363)                                                                                                        |
-| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25  | [AIRFLOW-6908] Lazy load AirflowException (#7528)                                                                                                                  |
-| [d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25  | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322)                                                                                            |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24  | [AIRFLOW-6894] Prevent db query in example_dags (#7516)                                                                                                            |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21  | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline integration (#7486)                                                                                                  |
-| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20  | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420)                                                                                                  |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17  | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429)                                                                                                                |
-| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17  | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400)                                                                                        |
-| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16  | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126)                                                                                                      |
-| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13  | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399)                                                                                                 |
-| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03  | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307)                                                                                                               |
-| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03  | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340)                                                                                             |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03  | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337)                                                                                                            |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                                                                                                     |
-| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30  | [AIRFLOW-6682] Move GCP classes to providers package (#7295)                                                                                                       |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29  | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)                                                                        |
-| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28  | [AIRFLOW-6656] Fix AIP-21 moving (#7272)                                                                                                                           |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27  | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)                                                                                           |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21  | [AIRFLOW-6610] Move software classes to providers package (#7231)                                                                                                  |
-| [f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13  | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151)                                                                                                          |
-| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13  | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125)                                                                                            |
-| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09  | [AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112)                                                                                               |
-| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08  | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046)                                                                                                   |
-| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03  | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020)                                                                                                        |
-| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03  | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007)                                                                            |
-| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31  | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968)                                                                                                    |
-| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09  | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464)                                                                                                        |
-| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09  | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749)                                                                                                     |
-| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09  | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734)                                                                                                                  |
-| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01  | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701)                                                                                                  |
-| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26  | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601)                                                                                                      |
-| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15  | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393)                                                                                                        |
-| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13  | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581)                                                                                                |
-| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13  | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371)                                                                                            |
-| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05  | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497)                                                                                                               |
-| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04  | [AIRFLOW-5743] Move Google PubSub to providers package (#6476)                                                                                                     |
-| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30  | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421)                                                                                                    |
-| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27  | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424)                                                                                               |
-| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22  | [AIRFLOW-4971] Add Google Display & Video 360 integration (#6170)                                                                                                  |
-| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22  | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228)                                                                                                         |
-| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18  | [AIRFLOW-5656] Rename provider to providers module (#6333)                                                                                                         |
diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml
index 190edba..9961b13 100644
--- a/airflow/providers/google/provider.yaml
+++ b/airflow/providers/google/provider.yaml
@@ -28,6 +28,7 @@ description: |
       - `Google Workspace <https://workspace.google.pl/>`__ (formerly Google Suite)
 
 versions:
+  - 2.0.0
   - 1.0.0
 
 integrations:
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/grpc/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/grpc/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/grpc/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 0dc1f56..0000000
--- a/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [fcfc7f124](https://github.com/apache/airflow/commit/fcfc7f12421bd35a366324fe7814c90da8de5735) | 2020-11-04  | Improve reading SSL credentials file in GRPC Hook (#12094)                     |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)           |
-| [4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21  | Add typing for grpc provider (#9884)                                           |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30  | Remove sql like function in base_hook (#7901)                                  |
-| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23  | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826)  |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                 |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
diff --git a/airflow/providers/grpc/README.md b/airflow/providers/grpc/README.md
deleted file mode 100644
index 29480f8..0000000
--- a/airflow/providers/grpc/README.md
+++ /dev/null
@@ -1,145 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-grpc
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `grpc` provider. All classes for this provider package
-are in `airflow.providers.grpc` python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
-during installation. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-grpc`.
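For reference, a minimal sketch of the install sequence described in the note above, assuming you take the pip-downgrade route (both commands are taken from the note itself):

```bash
# Work around the pip 20.3 resolver issue by pinning pip to 20.2.4, then install the provider
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-grpc
```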
-
-## PIP requirements
-
-| PIP package          | Version required   |
-|:---------------------|:-------------------|
-| google-auth          | >=1.0.0, <2.0.0dev |
-| google-auth-httplib2 | >=0.0.1            |
-| grpcio               | >=1.15.0           |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `grpc` provider
-are in the `airflow.providers.grpc` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.grpc` package                                                               | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                   |
-|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.grpc.GrpcOperator](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/operators/grpc.py) | [contrib.operators.grpc_operator.GrpcOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/grpc_operator.py) |
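As a quick, hedged sanity check of the path move listed above (the class itself is unchanged; only the import location differs), something like the following should resolve on an Airflow 2.0 installation with this provider installed:

```bash
# Verify the Airflow 2.0 import path for GrpcOperator
# (previously airflow.contrib.operators.grpc_operator.GrpcOperator in 1.10.*)
python -c "from airflow.providers.grpc.operators.grpc import GrpcOperator; print(GrpcOperator)"
```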
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.grpc` package                                                   |
-|:----------------------------------------------------------------------------------------------------------|
-| [hooks.grpc.GrpcHook](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/hooks/grpc.py) |
-
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)             |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [fcfc7f124](https://github.com/apache/airflow/commit/fcfc7f12421bd35a366324fe7814c90da8de5735) | 2020-11-04  | Improve reading SSL credentials file in GRPC Hook (#12094)                     |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                           |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03  | Use PyUpgrade to use Python 3.6 features (#11447)                              |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                    |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)           |
-| [4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21  | Add typing for grpc provider (#9884)                                           |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30  | Remove sql like function in base_hook (#7901)                                  |
-| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23  | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826)  |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                       |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                 |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)     |
diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index 86dadba..496d04e 100644
--- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md
index 4f1b30e..994d1bf 100644
--- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md
@@ -113,7 +113,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)             |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/hashicorp/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/hashicorp/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/hashicorp/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 4e60d70..0000000
--- a/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,56 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [36a9b0f48](https://github.com/apache/airflow/commit/36a9b0f48baf4a8ef8fc02a450a279948a8c0f02) | 2020-11-20  | Fix the default value for VaultBackend's config_path (#12518)                  |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)    |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02  | Vault with optional Variables or Connections (#11736)                          |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                               |
-| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28  | Update Google Cloud branding (#10615)                                          |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08  | Get Airflow configs with sensitive data from Secret Backends (#9645)           |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                    |
-| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03  | Allow setting Hashicorp Vault token from File (#9644)                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19  | Add more authentication options for HashiCorp Vault classes (#8974)            |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17  | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333)         |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04  | Support k8s auth method in Vault Secrets provider (#8640)                      |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28  | Get Airflow Variables from Hashicorp Vault (#7944)                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                             |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                  |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                 |
-| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23  | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795)              |
-| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19  | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741)      |
diff --git a/airflow/providers/hashicorp/README.md b/airflow/providers/hashicorp/README.md
deleted file mode 100644
index 2a54b7f..0000000
--- a/airflow/providers/hashicorp/README.md
+++ /dev/null
@@ -1,165 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-hashicorp
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Cross provider package dependencies](#cross-provider-package-dependencies)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-    - [Secrets](#secrets)
-        - [Moved secrets](#moved-secrets)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `hashicorp` provider. All classes for this provider package
-are in `airflow.providers.hashicorp` python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
-does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
-during installation. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-hashicorp`.
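A minimal sketch of the alternative route mentioned in the note above, assuming you keep pip 20.3 and opt into the legacy resolver instead of downgrading:

```bash
# Keep pip 20.3 but fall back to the legacy resolver for the install
pip install --use-deprecated legacy-resolver apache-airflow-providers-hashicorp
```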
-
-## PIP requirements
-
-| PIP package   | Version required   |
-|:--------------|:-------------------|
-| hvac          | ~=0.10             |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-hashicorp[google]
-```
-
-| Dependent package                                                                           | Extra   |
-|:--------------------------------------------------------------------------------------------|:--------|
-| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google  |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `hashicorp` provider
-are in the `airflow.providers.hashicorp` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.hashicorp` package                                                      |
-|:------------------------------------------------------------------------------------------------------------------|
-| [hooks.vault.VaultHook](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/hooks/vault.py) |
-
-
-
-## Secrets
-
-
-
-### Moved secrets
-
-| Airflow 2.0 secrets: `airflow.providers.hashicorp` package                                                               | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                   |
-|:-------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------|
-| [secrets.vault.VaultBackend](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/secrets/vault.py) | [contrib.secrets.hashicorp_vault.VaultBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/hashicorp_vault.py) |
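As an illustration of where the moved class is typically wired in, here is a minimal sketch of enabling it as the secrets backend via Airflow's standard environment-variable configuration; the `url` and `mount_point` keyword arguments are assumptions based on VaultBackend's documented options and are not part of this diff:

```bash
# Hypothetical example: point Airflow at the Vault secrets backend (adjust kwargs to your Vault setup)
export AIRFLOW__SECRETS__BACKEND="airflow.providers.hashicorp.secrets.vault.VaultBackend"
export AIRFLOW__SECRETS__BACKEND_KWARGS='{"url": "http://127.0.0.1:8200", "mount_point": "airflow"}'
```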
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                        |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)           |
-| [36a9b0f48](https://github.com/apache/airflow/commit/36a9b0f48baf4a8ef8fc02a450a279948a8c0f02) | 2020-11-20  | Fix the default value for VaultBackend's config_path (#12518)                  |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                     |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                     |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)    |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                 |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                         |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                        |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)             |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                  |
-| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02  | Vault with optional Variables or Connections (#11736)                          |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                     |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)             |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                   |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                     |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                           |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                   |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                               |
-| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28  | Update Google Cloud branding (#10615)                                          |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                    |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                        |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25  | Remove all "noinspection" comments native to IntelliJ (#10525)                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                     |
-| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08  | Get Airflow configs with sensitive data from Secret Backends (#9645)           |
-| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06  | Upgrade to latest pre-commit checks (#9686)                                    |
-| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03  | Allow setting Hashicorp Vault token from File (#9644)                          |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                 |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                     |
-| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19  | Add more authentication options for HashiCorp Vault classes (#8974)            |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                    |
-| [d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17  | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333)         |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                         |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                   |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)        |
-| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04  | Support k8s auth method in Vault Secrets provider (#8640)                      |
-| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09  | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)               |
-| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28  | Get Airflow Variables from Hashicorp Vault (#7944)                             |
-| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28  | Make BaseSecretsBackend.build_path generic (#7948)                             |
-| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25  | Standardize SecretBackend class names (#7846)                                  |
-| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23  | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)                 |
-| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23  | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795)              |
-| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19  | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741)      |
diff --git a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
index e1091fe..55dffd1 100644
--- a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
+++ b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md
@@ -4,7 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)                                       |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)                                       |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                           |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                   |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                  |
diff --git a/airflow/providers/http/BACKPORT_PROVIDER_README.md b/airflow/providers/http/BACKPORT_PROVIDER_README.md
index 0fcc614..6a63277 100644
--- a/airflow/providers/http/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/http/BACKPORT_PROVIDER_README.md
@@ -103,7 +103,7 @@ in [Naming conventions for provider packages](https://github.com/apache/airflow/
 
 | Commit                                                                                         | Committed   | Subject                                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)                                       |
+| [4873d9759](https://github.com/apache/airflow/commit/4873d9759dfdec1dd3663074f9e64ad69fa881cc) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427)                                       |
 | [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                           |
 | [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                   |
 | [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                  |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/http/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/http/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/http/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index fc6ef37..0000000
--- a/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,65 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [c1cd50465](https://github.com/apache/airflow/commit/c1cd50465c5473bc817fded5eeb4c425a0529ae5) | 2020-12-05  | Add 'headers' to template_fields in HttpSensor (#12809)                                                                                                                    |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 2020-11-09  | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922)                                                           |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16  | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555)                                                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                    |
-| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22  | Fix duplicate task_ids in example_http.py (#10485)                                                                                                                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22  | Add response_filter parameter to SimpleHttpOperator (#9885)                                                                                                        |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                                     |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30  | Add http system test (#8591)                                                                                                                                       |
-| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18  | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434)                                                                                                              |
-| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18  | [AIRFLOW-5156] Added auth type to HttpHook (#8429)                                                                                                                 |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23  | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826)                                                                                              |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)                                                                                         |
diff --git a/airflow/providers/http/README.md b/airflow/providers/http/README.md
deleted file mode 100644
index 2068fb9..0000000
--- a/airflow/providers/http/README.md
+++ /dev/null
@@ -1,164 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-
-# Package apache-airflow-providers-http
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for `http` provider. All classes for this provider package
-are in `airflow.providers.http` python package.
-
-
-
-## Installation
-
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing airflow 2.* installation via
-`pip install apache-airflow-providers-http`
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `http` provider
-are in the `airflow.providers.http` package. You can read more about the naming conventions used
-in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
-
-
-## Operators
-
-
-
-### Moved operators
-
-| Airflow 2.0 operators: `airflow.providers.http` package                                                                     | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                         |
-|:----------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.http.SimpleHttpOperator](https://github.com/apache/airflow/blob/master/airflow/providers/http/operators/http.py) | [operators.http_operator.SimpleHttpOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/http_operator.py) |
-
-
-## Sensors
-
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.http` package                                                           | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                         |
-|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------|
-| [sensors.http.HttpSensor](https://github.com/apache/airflow/blob/master/airflow/providers/http/sensors/http.py) | [sensors.http_sensor.HttpSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/http_sensor.py) |
-
-
-## Hooks
-
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.http` package                                                       | Airflow 1.10.* previous location (usually `airflow.contrib`)                                               |
-|:----------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------|
-| [hooks.http.HttpHook](https://github.com/apache/airflow/blob/master/airflow/providers/http/hooks/http.py) | [hooks.http_hook.HttpHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/http_hook.py) |
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [c1cd50465](https://github.com/apache/airflow/commit/c1cd50465c5473bc817fded5eeb4c425a0529ae5) | 2020-12-05  | Add 'headers' to template_fields in HttpSensor (#12809)                                                                                                                    |
-| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30  | Move operator guides to provider documentation packages (#12681)                                                                                                   |
-| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21  | Fix Python Docstring parameters (#12513)                                                                                                                           |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18  | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)                                                                                        |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 2020-11-09  | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922)                                                           |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16  | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555)                                                                             |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09  | Upgrade black to 20.8b1 (#10818)                                                                                                                                   |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                    |
-| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22  | Fix duplicate task_ids in example_http.py (#10485)                                                                                                                 |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02  | Remove `args` parameter from provider operator constructors (#10097)                                                                                               |
-| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25  | Stop using start_date in default_args in example_dags (2) (#9985)                                                                                                  |
-| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22  | apply_default keeps the function signature for mypy (#9784)                                                                                                        |
-| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22  | Add response_filter parameter to SimpleHttpOperator (#9885)                                                                                                        |
-| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19  | Increase typing for Apache and http provider package (#9729)                                                                                                       |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18  | Detect automatically the lack of reference to the guide in the operator descriptions (#9290)                                                                       |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                                     |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30  | Add http system test (#8591)                                                                                                                                       |
-| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18  | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434)                                                                                                              |
-| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18  | [AIRFLOW-5156] Added auth type to HttpHook (#8429)                                                                                                                 |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23  | Make airflow/providers pylint compatible (#7802)                                                                                                                   |
-| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23  | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826)                                                                                              |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24  | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517)                                                                                   |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18  | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)                                                                           |
-| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03  | [AIRFLOW-4681] Make sensors module pylint compatible (#7309)                                                                                                       |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30  | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)                                                                                                 |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)                                                                                         |
diff --git a/docs/apache-airflow-providers-google/index.rst b/airflow/providers/imap/CHANGELOG.rst
similarity index 53%
copy from docs/apache-airflow-providers-google/index.rst
copy to airflow/providers/imap/CHANGELOG.rst
index 140d473..cef7dda 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/airflow/providers/imap/CHANGELOG.rst
@@ -15,32 +15,11 @@
     specific language governing permissions and limitations
     under the License.
 
-``apache-airflow-providers-google``
-===================================
 
-Content
--------
+Changelog
+---------
 
-.. toctree::
-    :maxdepth: 1
-    :caption: Guides
+1.0.0
+.....
 
-    Connection types <connections/index>
-    Logging handlers <logging/index>
-    Secrets backends <secrets-backends/google-cloud-secret-manager-backend>
-    API Authentication backend <api-auth-backend/google-openid>
-    Operators <operators/index>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: References
-
-    Python API <_api/airflow/providers/google/index>
-    Configuration <configurations-ref>
-
-.. toctree::
-    :maxdepth: 1
-    :caption: Resources
-
-    Example DAGs <example-dags>
-    PyPI Repository <https://pypi.org/project/apache-airflow-providers-google/>
+Initial version of the provider.
diff --git a/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index 585bf18..0000000
--- a/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08  | Rename remaing modules to match AIP-21 (#12917)                                                                                                                    |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08  | Add support for dynamic connection form fields per provider (#12558)                                                                                               |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29  | Adds support for Connection/Hook discovery from providers (#12466)                                                                                                 |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20  | Separate out documentation building per provider  (#12444)                                                                                                         |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18  | Update provider READMEs for 1.0.0b2 batch release (#12449)                                                                                                         |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17  | Update wrong commit hash in backport provider changes (#12390)                                                                                                     |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15  | Improvements for operators and hooks ref docs (#12366)                                                                                                             |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13  | Docs installation improvements (#12304)                                                                                                                            |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09  | Point at pypi project pages for cross-dependency of provider packages (#12212)                                                                                     |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09  | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)                                                                                                 |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09  | Moves provider packages scripts to dev (#12082)                                                                                                                    |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04  | Simplify string expressions (#12093)                                                                                                                               |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03  | Enable Black - Python Auto Formmatter (#9550)                                                                                                                      |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26  | Prepare providers release 0.0.2a1 (#11855)                                                                                                                         |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25  | Generated backport providers readmes/setup for 2020.10.29 (#11826)                                                                                                 |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20  | Add D200 pydocstyle check (#11688)                                                                                                                                 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13  | Added support for provider packages for Airflow 2.0 (#11487)                                                                                                       |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03  | Fix Broken Markdown links in Providers README TOC (#11249)                                                                                                         |
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02  | Fixed month in backport packages to October (#11242)                                                                                                               |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02  | Prepare Backport release 2020.09.07 (#11238)                                                                                                                       |
-| [08dfd8cd0](https://github.com/apache/airflow/commit/08dfd8cd00dae2d7aad53018af04428d933b1ceb) | 2020-09-25  | Increase Type coverage for IMAP provider (#11154)                                                                                                                  |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22  | Add D202 pydocstyle check (#11032)                                                                                                                                 |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25  | Enable Black on Providers Packages (#10543)                                                                                                                        |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24  | Fix typo in the word "release" (#10528)                                                                                                                                    |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22  | Fix broken Markdown refernces in Providers README (#10483)                                                                                                         |
-| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07  | Add correct signature to all operators and sensors (#10205)                                                                                                        |
-| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06  | Changes to all the constructors to remove the args argument (#10163)                                                                                               |
-| [0aff69fbd](https://github.com/apache/airflow/commit/0aff69fbd2f5a09c51f5b503ebf1bb72a26d3290) | 2020-07-27  | Add typing to ImapHook (#9887)                                                                                                                                     |
-| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19  | Fixed release number for fresh release (#9408)                                                                                                                     |
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19  | Final cleanup for 2020.6.23rc1 release preparation (#9404)                                                                                                         |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19  | Prepare backport release candidate for 2020.6.23rc1 (#9370)                                                                                                        |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16  | Introduce 'transfers' packages (#9320)                                                                                                                                     |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
-| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16  | Regenerate readme files for backport package release (#8886)                                                                                                       |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15  | Added automated release notes generation for backport operators (#8807)                                                                                            |
-| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23  | Add missing call to Super class in remaining providers (#7828)                                                                                                     |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22  | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02  | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)                                                                                                           |
-| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02  | [AIRFLOW-6708] Set unique logger names (#7330)                                                                                                                     |
-| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27  | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268)                                                                                         |
diff --git a/airflow/providers/imap/README.md b/airflow/providers/imap/README.md
deleted file mode 100644
index d56d79d..0000000
--- a/airflow/providers/imap/README.md
+++ /dev/null
@@ -1,136 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
... 15029 lines suppressed ...


[airflow] 03/41: Log all breeze output to a file automatically (#14470)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 656a46792363850dfce93e8a0fce7351947f248c
Author: Jarek Potiuk <po...@apache.org>
AuthorDate: Fri Feb 26 21:49:56 2021 +0100

    Log all breeze output to a file automatically (#14470)
    
    (cherry picked from commit 4a54292b69bb9a68a354c34246f019331270df3d)
---
 BREEZE.rst |  2 ++
 breeze     | 10 ++++++++++
 2 files changed, 12 insertions(+)

diff --git a/BREEZE.rst b/BREEZE.rst
index f4689ba..730d6a1 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -36,6 +36,8 @@ We called it *Airflow Breeze* as **It's a Breeze to contribute to Airflow**.
 The advantages and disadvantages of using the Breeze environment vs. other ways of testing Airflow
 are described in `CONTRIBUTING.rst <CONTRIBUTING.rst#integration-test-development-environment>`_.
 
+All output from the last ``./breeze`` command is automatically logged to the ``logs/breeze.out`` file.
+
 Watch the video below about Airflow Breeze. It explains the motivation for Breeze
 and screencasts all its uses.
 
diff --git a/breeze b/breeze
index c5d420b..124ed85 100755
--- a/breeze
+++ b/breeze
@@ -18,10 +18,20 @@
 # under the License.
 set -euo pipefail
 
+
 AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [[ ${BREEZE_REDIRECT=} == "" ]]; then
+    mkdir -p "${AIRFLOW_SOURCES}"/logs
+    export BREEZE_REDIRECT="true"
+    "${0}" "${@}" 2>&1 | tee "${AIRFLOW_SOURCES}"/logs/breeze.out
+    exit
+fi
+
 export AIRFLOW_SOURCES
 readonly AIRFLOW_SOURCES
 
+
 # Bash arrays need to be defined outside of functions unfortunately :(
 # Because on Mac OS Bash 3.4 defining arrays inside functions does not work
 # Array with extra options for Docker compose
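
For readers skimming the diff: the trick is a sentinel environment variable; on the first
invocation the script re-executes itself with all output piped through ``tee`` into
``logs/breeze.out``, and the second invocation does the real work. A rough Python rendition
of the same pattern (purely illustrative, not part of the change above) would be:

.. code-block:: python

    # Editorial sketch of the "re-exec yourself through tee" pattern used by the
    # breeze script above. Only the standard library is used; paths mirror the diff.
    import os
    import subprocess
    import sys

    if os.environ.get("BREEZE_REDIRECT", "") == "":
        logs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logs")
        os.makedirs(logs_dir, exist_ok=True)
        os.environ["BREEZE_REDIRECT"] = "true"
        # Re-run this very script, duplicating stdout/stderr into logs/breeze.out.
        child = subprocess.Popen(
            [sys.executable, *sys.argv], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        subprocess.run(["tee", os.path.join(logs_dir, "breeze.out")], stdin=child.stdout)
        sys.exit(child.wait())

    print("second invocation: real work happens here, fully logged")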


[airflow] 12/41: Add Neo4j hook and operator (#13324)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3072feb021f9dd17b17ff39e3649509f45e96d7f
Author: Kanthi <su...@gmail.com>
AuthorDate: Thu Jan 14 11:27:50 2021 -0500

    Add Neo4j hook and operator (#13324)
    
    Close: #12873
    (cherry picked from commit 1d2977f6a4c67fa6174c79dcdc4e9ee3ce06f1b1)
---
 CONTRIBUTING.rst                                   |   9 +-
 INSTALL                                            |   9 +-
 airflow/providers/neo4j/README.md                  |  18 ++++
 airflow/providers/neo4j/__init__.py                |  17 +++
 airflow/providers/neo4j/example_dags/__init__.py   |  17 +++
 .../providers/neo4j/example_dags/example_neo4j.py  |  48 +++++++++
 airflow/providers/neo4j/hooks/__init__.py          |  17 +++
 airflow/providers/neo4j/hooks/neo4j.py             | 117 +++++++++++++++++++++
 airflow/providers/neo4j/operators/__init__.py      |  17 +++
 airflow/providers/neo4j/operators/neo4j.py         |  62 +++++++++++
 airflow/providers/neo4j/provider.yaml              |  44 ++++++++
 .../connections/neo4j.rst                          |  63 +++++++++++
 docs/apache-airflow-providers-neo4j/index.rst      |  48 +++++++++
 .../operators/neo4j.rst                            |  50 +++++++++
 docs/apache-airflow/concepts.rst                   |   4 +-
 docs/apache-airflow/extra-packages-ref.rst         |   2 +
 docs/apache-airflow/start/local.rst                |   2 +-
 docs/spelling_wordlist.txt                         |   4 +
 .../run_install_and_test_provider_packages.sh      |   4 +-
 setup.py                                           |   3 +
 tests/core/test_providers_manager.py               |   2 +
 tests/providers/neo4j/__init__.py                  |  17 +++
 tests/providers/neo4j/hooks/__init__.py            |  17 +++
 tests/providers/neo4j/hooks/test_neo4j.py          |  65 ++++++++++++
 tests/providers/neo4j/operators/__init__.py        |  17 +++
 tests/providers/neo4j/operators/test_neo4j.py      |  61 +++++++++++
 26 files changed, 721 insertions(+), 13 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index ff8c80a..6d0e224 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -578,10 +578,11 @@ async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
 databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc, docker,
 druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth,
 grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap,
-microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, odbc, openfaas, opsgenie,
-oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole, rabbitmq,
-redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack, snowflake, spark,
-sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm, yandex, zendesk
+microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
+opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
+rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
+snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
+yandex, zendesk
 
   .. END EXTRAS HERE
 
diff --git a/INSTALL b/INSTALL
index 4ee3f2b..e1ef456 100644
--- a/INSTALL
+++ b/INSTALL
@@ -103,10 +103,11 @@ async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
 databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc, docker,
 druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth,
 grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap,
-microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, odbc, openfaas, opsgenie,
-oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole, rabbitmq,
-redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack, snowflake, spark,
-sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm, yandex, zendesk
+microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
+opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
+rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
+snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
+yandex, zendesk
 
 # END EXTRAS HERE
 
diff --git a/airflow/providers/neo4j/README.md b/airflow/providers/neo4j/README.md
new file mode 100644
index 0000000..ef14aff
--- /dev/null
+++ b/airflow/providers/neo4j/README.md
@@ -0,0 +1,18 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
diff --git a/airflow/providers/neo4j/__init__.py b/airflow/providers/neo4j/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/neo4j/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/neo4j/example_dags/__init__.py b/airflow/providers/neo4j/example_dags/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/neo4j/example_dags/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/neo4j/example_dags/example_neo4j.py b/airflow/providers/neo4j/example_dags/example_neo4j.py
new file mode 100644
index 0000000..7d6f2fc
--- /dev/null
+++ b/airflow/providers/neo4j/example_dags/example_neo4j.py
@@ -0,0 +1,48 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Example use of Neo4j related operators.
+"""
+
+from airflow import DAG
+from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
+from airflow.utils.dates import days_ago
+
+default_args = {
+    'owner': 'airflow',
+}
+
+dag = DAG(
+    'example_neo4j',
+    default_args=default_args,
+    start_date=days_ago(2),
+    tags=['example'],
+)
+
+# [START run_query_neo4j_operator]
+
+neo4j_task = Neo4jOperator(
+    task_id='run_neo4j_query',
+    neo4j_conn_id='neo4j_conn_id',
+    sql='MATCH (tom {name: "Tom Hanks"}) RETURN tom',
+    dag=dag,
+)
+
+# [END run_query_neo4j_operator]
+
+neo4j_task
diff --git a/airflow/providers/neo4j/hooks/__init__.py b/airflow/providers/neo4j/hooks/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/neo4j/hooks/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/neo4j/hooks/neo4j.py b/airflow/providers/neo4j/hooks/neo4j.py
new file mode 100644
index 0000000..d473b01
--- /dev/null
+++ b/airflow/providers/neo4j/hooks/neo4j.py
@@ -0,0 +1,117 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""This module allows to connect to a Neo4j database."""
+
+from neo4j import GraphDatabase, Neo4jDriver, Result
+
+from airflow.hooks.base import BaseHook
+from airflow.models import Connection
+
+
+class Neo4jHook(BaseHook):
+    """
+    Interact with Neo4j.
+
+    Performs a connection to Neo4j and runs the query.
+    """
+
+    conn_name_attr = 'neo4j_conn_id'
+    default_conn_name = 'neo4j_default'
+    conn_type = 'neo4j'
+    hook_name = 'Neo4j'
+
+    def __init__(self, conn_id: str = default_conn_name, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.neo4j_conn_id = conn_id
+        self.connection = kwargs.pop("connection", None)
+        self.client = None
+        self.extras = None
+        self.uri = None
+
+    def get_conn(self) -> Neo4jDriver:
+        """
+        Function that initiates a new Neo4j connection
+        with username, password and database schema.
+        """
+        self.connection = self.get_connection(self.neo4j_conn_id)
+        self.extras = self.connection.extra_dejson.copy()
+
+        self.uri = self.get_uri(self.connection)
+        self.log.info('URI: %s', self.uri)
+
+        if self.client is not None:
+            return self.client
+
+        is_encrypted = self.connection.extra_dejson.get('encrypted', False)
+
+        self.client = GraphDatabase.driver(
+            self.uri, auth=(self.connection.login, self.connection.password), encrypted=is_encrypted
+        )
+
+        return self.client
+
+    def get_uri(self, conn: Connection) -> str:
+        """
+        Build the URI based on extras:
+        - Default - uses the bolt scheme (bolt://)
+        - neo4j_scheme - neo4j://
+        - certs_self_signed - neo4j+ssc://
+        - certs_trusted_ca - neo4j+s://
+        :param conn: connection object.
+        :return: uri
+        """
+        use_neo4j_scheme = conn.extra_dejson.get('neo4j_scheme', False)
+        scheme = 'neo4j' if use_neo4j_scheme else 'bolt'
+
+        # Self signed certificates
+        ssc = conn.extra_dejson.get('certs_self_signed', False)
+
+        # Only certificates signed by CA.
+        trusted_ca = conn.extra_dejson.get('certs_trusted_ca', False)
+        encryption_scheme = ''
+
+        if ssc:
+            encryption_scheme = '+ssc'
+        elif trusted_ca:
+            encryption_scheme = '+s'
+
+        return '{scheme}{encryption_scheme}://{host}:{port}'.format(
+            scheme=scheme,
+            encryption_scheme=encryption_scheme,
+            host=conn.host,
+            port='7687' if conn.port is None else f'{conn.port}',
+        )
+
+    def run(self, query) -> Result:
+        """
+        Function to create a neo4j session
+        and execute the query in the session.
+
+
+        :param query: Neo4j query
+        :return: Result
+        """
+        driver = self.get_conn()
+        if not self.connection.schema:
+            with driver.session() as session:
+                result = session.run(query)
+        else:
+            with driver.session(database=self.connection.schema) as session:
+                result = session.run(query)
+        return result
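
For orientation (an editorial sketch, not part of the commit): with a connection named
``neo4j_default`` configured, the hook added above is used by instantiating it with the
connection id and calling ``run()`` with a Cypher query:

.. code-block:: python

    # Minimal usage sketch of the Neo4jHook; assumes an Airflow connection
    # named "neo4j_default" pointing at a reachable Neo4j server.
    from airflow.providers.neo4j.hooks.neo4j import Neo4jHook

    hook = Neo4jHook(conn_id="neo4j_default")
    # Builds the URI from the connection and its extras, opens a driver session
    # (optionally against the connection's schema) and runs the query.
    result = hook.run('MATCH (tom {name: "Tom Hanks"}) RETURN tom')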
diff --git a/airflow/providers/neo4j/operators/__init__.py b/airflow/providers/neo4j/operators/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/neo4j/operators/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/neo4j/operators/neo4j.py b/airflow/providers/neo4j/operators/neo4j.py
new file mode 100644
index 0000000..20df9cb
--- /dev/null
+++ b/airflow/providers/neo4j/operators/neo4j.py
@@ -0,0 +1,62 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Dict, Iterable, Mapping, Optional, Union
+
+from airflow.models import BaseOperator
+from airflow.providers.neo4j.hooks.neo4j import Neo4jHook
+from airflow.utils.decorators import apply_defaults
+
+
+class Neo4jOperator(BaseOperator):
+    """
+    Executes sql code in a specific Neo4j database
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:Neo4jOperator`
+
+    :param sql: the sql code to be executed. Can receive a str representing a
+        sql statement, a list of str (sql statements)
+    :type sql: str or list[str]
+    :param neo4j_conn_id: reference to a specific Neo4j database
+    :type neo4j_conn_id: str
+    """
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        sql: str,
+        neo4j_conn_id: str = 'neo4j_default',
+        parameters: Optional[Union[Mapping, Iterable]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.neo4j_conn_id = neo4j_conn_id
+        self.sql = sql
+        self.parameters = parameters
+        self.hook = None
+
+    def get_hook(self):
+        """Function to retrieve the Neo4j Hook."""
+        return Neo4jHook(conn_id=self.neo4j_conn_id)
+
+    def execute(self, context: Dict) -> None:
+        self.log.info('Executing: %s', self.sql)
+        self.hook = self.get_hook()
+        self.hook.run(self.sql)
diff --git a/airflow/providers/neo4j/provider.yaml b/airflow/providers/neo4j/provider.yaml
new file mode 100644
index 0000000..9081694
--- /dev/null
+++ b/airflow/providers/neo4j/provider.yaml
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+---
+package-name: apache-airflow-providers-neo4j
+name: Neo4j
+description: |
+    `Neo4j <https://neo4j.com/>`__
+
+versions:
+  - 1.0.0
+integrations:
+  - integration-name: Neo4j
+    external-doc-url: https://neo4j.com/
+    how-to-guide:
+      - /docs/apache-airflow-providers-neo4j/operators/neo4j.rst
+    tags: [software]
+
+operators:
+  - integration-name: Neo4j
+    python-modules:
+      - airflow.providers.neo4j.operators.neo4j
+
+hooks:
+  - integration-name: Neo4j
+    python-modules:
+      - airflow.providers.neo4j.hooks.neo4j
+
+hook-class-names:
+  - airflow.providers.neo4j.hooks.neo4j.Neo4jHook
diff --git a/docs/apache-airflow-providers-neo4j/connections/neo4j.rst b/docs/apache-airflow-providers-neo4j/connections/neo4j.rst
new file mode 100644
index 0000000..33fd6b5
--- /dev/null
+++ b/docs/apache-airflow-providers-neo4j/connections/neo4j.rst
@@ -0,0 +1,63 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+
+Neo4j Connection
+================
+The Neo4j connection type provides a connection to a Neo4j database.
+
+Configuring the Connection
+--------------------------
+Host (required)
+    The host to connect to.
+
+Schema (optional)
+    Specify the schema name to be used in the database.
+
+Login (required)
+    Specify the user name to connect.
+
+Password (required)
+    Specify the password to connect.
+
+Extra (optional)
+    Specify the extra parameters (as a JSON dictionary) that can be used in the
+    Neo4j connection.
+
+    The following extras are supported:
+
+        - Default - uses the bolt scheme (``bolt://``)
+        - neo4j_scheme - ``neo4j://``
+        - certs_self_signed - ``neo4j+ssc://``
+        - certs_trusted_ca - ``neo4j+s://``
+
+      * ``encrypted``: Sets ``encrypted=True/False`` for ``GraphDatabase.driver``; set to ``True`` for Neo4j Aura.
+      * ``neo4j_scheme``: Sets the scheme to ``neo4j://``; the default is ``bolt://``
+      * ``certs_self_signed``: Sets the URI scheme to support self-signed certificates (``neo4j+ssc://``)
+      * ``certs_trusted_ca``: Sets the URI scheme to support only trusted CA certificates (``neo4j+s://``)
+
+      Example "extras" field:
+
+      .. code-block:: json
+
+         {
+            "encrypted": true,
+            "neo4j_scheme": true,
+            "certs_self_signed": true,
+            "certs_trusted_ca": false
+         }
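
To make the mapping concrete, here is an illustrative sketch (not part of the commit) of how
these extras feed into the URI built by ``Neo4jHook.get_uri``; the host name is hypothetical:

.. code-block:: python

    # Sketch of how the extras documented above translate into a driver URI.
    import json

    from airflow.models import Connection
    from airflow.providers.neo4j.hooks.neo4j import Neo4jHook

    conn = Connection(
        conn_type="neo4j",
        host="example.com",  # hypothetical host
        login="neo4j",
        password="secret",
        extra=json.dumps({"neo4j_scheme": True, "certs_trusted_ca": True}),
    )
    print(Neo4jHook().get_uri(conn))  # neo4j+s://example.com:7687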
diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst
new file mode 100644
index 0000000..cafc57b
--- /dev/null
+++ b/docs/apache-airflow-providers-neo4j/index.rst
@@ -0,0 +1,48 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+``apache-airflow-providers-neo4j``
+==================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Guides
+
+    Connection types <connections/neo4j>
+    Operators <operators/neo4j>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/neo4j/index>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
+
+    Example DAGs <https://github.com/apache/airflow/tree/master/airflow/providers/neo4j/example_dags>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
+
+    PyPI Repository <https://pypi.org/project/apache-airflow-providers-neo4j/>
diff --git a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst
new file mode 100644
index 0000000..411aa0c
--- /dev/null
+++ b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst
@@ -0,0 +1,50 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+
+.. _howto/operator:Neo4jOperator:
+
+Neo4jOperator
+=============
+
+Use the :class:`~airflow.providers.neo4j.operators.Neo4jOperator` to execute
+SQL commands in a `Neo4j <https://neo4j.com/>`__ database.
+
+
+Using the Operator
+^^^^^^^^^^^^^^^^^^
+
+Use the ``neo4j_conn_id`` argument to connect to your Neo4j instance where
+the connection metadata is structured as follows:
+
+.. list-table:: Neo4j Airflow Connection Metadata
+   :widths: 25 25
+   :header-rows: 1
+
+   * - Parameter
+     - Input
+   * - Host: string
+     - Neo4j hostname
+   * - Schema: string
+     - Database name
+   * - Login: string
+     - Neo4j user
+   * - Password: string
+     - Neo4j user password
+   * - Port: int
+     - Neo4j port
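
A compressed usage sketch, mirroring the example DAG added earlier in this commit (the
connection id ``neo4j_conn_id`` is illustrative, not a required name):

.. code-block:: python

    # Editorial sketch tying the connection metadata above to the operator.
    from airflow import DAG
    from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
    from airflow.utils.dates import days_ago

    with DAG("example_neo4j", start_date=days_ago(2), tags=["example"]) as dag:
        Neo4jOperator(
            task_id="run_neo4j_query",
            neo4j_conn_id="neo4j_conn_id",
            sql='MATCH (tom {name: "Tom Hanks"}) RETURN tom',
        )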
diff --git a/docs/apache-airflow/concepts.rst b/docs/apache-airflow/concepts.rst
index 346f6c0..0522c0f 100644
--- a/docs/apache-airflow/concepts.rst
+++ b/docs/apache-airflow/concepts.rst
@@ -1321,8 +1321,8 @@ In case of DAG and task policies users may raise :class:`~airflow.exceptions.Air
 to prevent a DAG from being imported or prevent a task from being executed if the task is not compliant with
 users' check.
 
-Please note, cluster policy will have precedence over task attributes defined in DAG meaning
-if ``task.sla`` is defined in dag and also mutated via cluster policy then later will have precedence.
+Please note, cluster policy will have precedence over task attributes defined in DAG meaning that
+if ``task.sla`` is defined in dag and also mutated via cluster policy then the latter will have precedence.
 
 In next sections we show examples of each type of cluster policy.
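
For instance, a minimal task policy in ``airflow_local_settings.py`` might look like the
sketch below (illustrative only); if a DAG also sets ``task.sla``, the policy's value wins:

.. code-block:: python

    # Illustrative cluster policy: mutates every task's SLA before execution.
    from datetime import timedelta


    def task_policy(task):
        task.sla = timedelta(hours=2)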
 
diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst
index c565a93..b2549ae 100644
--- a/docs/apache-airflow/extra-packages-ref.rst
+++ b/docs/apache-airflow/extra-packages-ref.rst
@@ -213,6 +213,8 @@ Those are extras that add dependencies needed for integration with other softwar
 +---------------------+-----------------------------------------------------+-------------------------------------------+
 | mysql               | ``pip install 'apache-airflow[mysql]'``             | MySQL operators and hook                  |
 +---------------------+-----------------------------------------------------+-------------------------------------------+
+| neo4j               | ``pip install 'apache-airflow[neo4j]'``             | Neo4j operators and hook                  |
++---------------------+-----------------------------------------------------+-------------------------------------------+
 | odbc                | ``pip install 'apache-airflow[odbc]'``              | ODBC data sources including MS SQL Server |
 +---------------------+-----------------------------------------------------+-------------------------------------------+
 | openfaas            | ``pip install 'apache-airflow[openfaas]'``          | OpenFaaS hooks                            |
diff --git a/docs/apache-airflow/start/local.rst b/docs/apache-airflow/start/local.rst
index 7b0bb33..64aaa7a 100644
--- a/docs/apache-airflow/start/local.rst
+++ b/docs/apache-airflow/start/local.rst
@@ -86,7 +86,7 @@ the ``Admin->Configuration`` menu. The PID file for the webserver will be stored
 in ``$AIRFLOW_HOME/airflow-webserver.pid`` or in ``/run/airflow/webserver.pid``
 if started by systemd.
 
-Out of the box, Airflow uses a sqlite database, which you should outgrow
+Out of the box, Airflow uses a SQLite database, which you should outgrow
 fairly quickly since no parallelization is possible using this database
 backend. It works in conjunction with the
 :class:`~airflow.executors.sequential_executor.SequentialExecutor` which will
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index c541f06..db4342a 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -252,6 +252,7 @@ NaN
 Naik
 Namenode
 Namespace
+Neo4j
 Nextdoor
 Nones
 NotFound
@@ -992,6 +993,8 @@ navbar
 nd
 ndjson
 neighbours
+neo
+neo4j
 neq
 networkUri
 nginx
@@ -1219,6 +1222,7 @@ sqlsensor
 sqoop
 src
 srv
+ssc
 ssd
 sshHook
 sshtunnel
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index b3ee63b..969fa29 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -95,7 +95,7 @@ function discover_all_provider_packages() {
     # Columns is to force it wider, so it doesn't wrap at 80 characters
     COLUMNS=180 airflow providers list
 
-    local expected_number_of_providers=61
+    local expected_number_of_providers=62
     local actual_number_of_providers
     actual_providers=$(airflow providers list --output yaml | grep package_name)
     actual_number_of_providers=$(wc -l <<<"$actual_providers")
@@ -118,7 +118,7 @@ function discover_all_hooks() {
     group_start "Listing available hooks via 'airflow providers hooks'"
     COLUMNS=180 airflow providers hooks
 
-    local expected_number_of_hooks=59
+    local expected_number_of_hooks=60
     local actual_number_of_hooks
     actual_number_of_hooks=$(airflow providers hooks --output table | grep -c "| apache" | xargs)
     if [[ ${actual_number_of_hooks} != "${expected_number_of_hooks}" ]]; then
diff --git a/setup.py b/setup.py
index e967781..210b12f 100644
--- a/setup.py
+++ b/setup.py
@@ -360,6 +360,7 @@ mysql = [
     'mysql-connector-python>=8.0.11, <=8.0.22',
     'mysqlclient>=1.3.6,<1.4',
 ]
+neo4j = ['neo4j>=4.2.1']
 odbc = [
     'pyodbc',
 ]
@@ -557,6 +558,7 @@ PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
     'microsoft.winrm': winrm,
     'mongo': mongo,
     'mysql': mysql,
+    'neo4j': neo4j,
     'odbc': odbc,
     'openfaas': [],
     'opsgenie': [],
@@ -711,6 +713,7 @@ ALL_DB_PROVIDERS = [
     'microsoft.mssql',
     'mongo',
     'mysql',
+    'neo4j',
     'postgres',
     'presto',
     'vertica',
diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py
index 4c03984..7d80c58 100644
--- a/tests/core/test_providers_manager.py
+++ b/tests/core/test_providers_manager.py
@@ -57,6 +57,7 @@ ALL_PROVIDERS = [
     'apache-airflow-providers-microsoft-winrm',
     'apache-airflow-providers-mongo',
     'apache-airflow-providers-mysql',
+    'apache-airflow-providers-neo4j',
     'apache-airflow-providers-odbc',
     'apache-airflow-providers-openfaas',
     'apache-airflow-providers-opsgenie',
@@ -122,6 +123,7 @@ CONNECTIONS_LIST = [
     'mongo',
     'mssql',
     'mysql',
+    'neo4j',
     'odbc',
     'oracle',
     'pig_cli',
diff --git a/tests/providers/neo4j/__init__.py b/tests/providers/neo4j/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/tests/providers/neo4j/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/neo4j/hooks/__init__.py b/tests/providers/neo4j/hooks/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/tests/providers/neo4j/hooks/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/neo4j/hooks/test_neo4j.py b/tests/providers/neo4j/hooks/test_neo4j.py
new file mode 100644
index 0000000..7f64fc4
--- /dev/null
+++ b/tests/providers/neo4j/hooks/test_neo4j.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+import json
+import unittest
+from unittest import mock
+
+from airflow.models import Connection
+from airflow.providers.neo4j.hooks.neo4j import Neo4jHook
+
+
+class TestNeo4jHookConn(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        self.neo4j_hook = Neo4jHook()
+        self.connection = Connection(
+            conn_type='neo4j', login='login', password='password', host='host', schema='schema'
+        )
+
+    def test_get_uri_neo4j_scheme(self):
+
+        self.neo4j_hook.get_connection = mock.Mock()
+        self.neo4j_hook.get_connection.return_value = self.connection
+        uri = self.neo4j_hook.get_uri(self.connection)
+
+        self.assertEqual(uri, "bolt://host:7687")
+
+    def test_get_uri_bolt_scheme(self):
+
+        self.connection.extra = json.dumps({"bolt_scheme": True})
+        self.neo4j_hook.get_connection = mock.Mock()
+        self.neo4j_hook.get_connection.return_value = self.connection
+        uri = self.neo4j_hook.get_uri(self.connection)
+
+        self.assertEqual(uri, "bolt://host:7687")
+
+    def test_get_uri_bolt_ssc_scheme(self):
+        self.connection.extra = json.dumps({"certs_self_signed": True, "bolt_scheme": True})
+        self.neo4j_hook.get_connection = mock.Mock()
+        self.neo4j_hook.get_connection.return_value = self.connection
+        uri = self.neo4j_hook.get_uri(self.connection)
+
+        self.assertEqual(uri, "bolt+ssc://host:7687")
+
+    def test_get_uri_bolt_trusted_ca_scheme(self):
+        self.connection.extra = json.dumps({"certs_trusted_ca": True, "bolt_scheme": True})
+        self.neo4j_hook.get_connection = mock.Mock()
+        self.neo4j_hook.get_connection.return_value = self.connection
+        uri = self.neo4j_hook.get_uri(self.connection)
+
+        self.assertEqual(uri, "bolt+s://host:7687")
diff --git a/tests/providers/neo4j/operators/__init__.py b/tests/providers/neo4j/operators/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/tests/providers/neo4j/operators/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/neo4j/operators/test_neo4j.py b/tests/providers/neo4j/operators/test_neo4j.py
new file mode 100644
index 0000000..39c8d69
--- /dev/null
+++ b/tests/providers/neo4j/operators/test_neo4j.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+from unittest import mock
+
+from airflow.models.dag import DAG
+from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
+from airflow.utils import timezone
+
+DEFAULT_DATE = timezone.datetime(2015, 1, 1)
+DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
+DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
+TEST_DAG_ID = 'unit_test_dag'
+
+
+class TestNeo4jOperator(unittest.TestCase):
+    def setUp(self):
+        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
+        dag = DAG(TEST_DAG_ID, default_args=args)
+        self.dag = dag
+
+    @mock.patch('airflow.providers.neo4j.operators.neo4j.Neo4jOperator.get_hook')
+    def test_neo4j_operator_test(self, mock_hook):
+
+        sql = """
+            MATCH (tom {name: "Tom Hanks"}) RETURN tom
+            """
+        op = Neo4jOperator(task_id='basic_neo4j', sql=sql, dag=self.dag)
+        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)


[airflow] 37/41: Add Tableau provider separate from Salesforce Provider (#14030)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit d3359a03a986c3674b9058e4e800c41190120e62
Author: Jyoti Dhiman <36...@users.noreply.github.com>
AuthorDate: Thu Feb 25 17:52:54 2021 +0530

    Add Tableau provider separate from Salesforce Provider (#14030)
    
    Closes #13614
    
    (cherry picked from commit 45e72ca83049a7db526b1f0fbd94c75f5f92cc75)
---
 CONTRIBUTING.rst                                   |   1 +
 airflow/providers/dependencies.json                |   3 +
 airflow/providers/salesforce/CHANGELOG.rst         |  16 ++++
 airflow/providers/salesforce/hooks/tableau.py      | 104 ++-------------------
 .../operators/tableau_refresh_workbook.py          |  88 ++---------------
 airflow/providers/salesforce/provider.yaml         |   6 +-
 .../salesforce/sensors/tableau_job_status.py       |  68 +++-----------
 .../{salesforce => tableau}/CHANGELOG.rst          |   0
 .../provider.yaml => tableau/__init__.py}          |  34 +------
 .../example_dags/__init__.py}                      |  33 -------
 .../example_tableau_refresh_workbook.py            |   4 +-
 .../provider.yaml => tableau/hooks/__init__.py}    |  34 +------
 .../{salesforce => tableau}/hooks/tableau.py       |   0
 .../operators/__init__.py}                         |  33 -------
 .../operators/tableau_refresh_workbook.py          |   4 +-
 .../{salesforce => tableau}/provider.yaml          |  26 +++---
 .../provider.yaml => tableau/sensors/__init__.py}  |  33 -------
 .../sensors/tableau_job_status.py                  |   2 +-
 .../apache-airflow-providers-tableau/index.rst     |  29 +++++-
 docs/integration-logos/tableau/tableau.png         | Bin 0 -> 4142 bytes
 docs/spelling_wordlist.txt                         |   1 +
 .../run_install_and_test_provider_packages.sh      |   2 +-
 setup.py                                           |   4 +-
 tests/core/test_providers_manager.py               |   1 +
 .../providers/tableau/hooks/__init__.py            |  34 +------
 .../{salesforce => tableau}/hooks/test_tableau.py  |  32 +++++--
 .../providers/tableau/operators/__init__.py        |  33 -------
 .../operators/test_tableau_refresh_workbook.py     |  26 +++++-
 .../providers/tableau/sensors/__init__.py          |  33 -------
 .../sensors/test_tableau_job_status.py             |  16 +++-
 30 files changed, 162 insertions(+), 538 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 0a6f381..857d3bb 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -654,6 +654,7 @@ microsoft.mssql            odbc
 mysql                      amazon,presto,vertica
 opsgenie                   http
 postgres                   amazon
+salesforce                 tableau
 sftp                       ssh
 slack                      http
 snowflake                  slack
diff --git a/airflow/providers/dependencies.json b/airflow/providers/dependencies.json
index 836020c..b01e96c 100644
--- a/airflow/providers/dependencies.json
+++ b/airflow/providers/dependencies.json
@@ -67,6 +67,9 @@
   "postgres": [
     "amazon"
   ],
+  "salesforce": [
+    "tableau"
+  ],
   "sftp": [
     "ssh"
   ],
diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/airflow/providers/salesforce/CHANGELOG.rst
index cef7dda..b4eb0ed 100644
--- a/airflow/providers/salesforce/CHANGELOG.rst
+++ b/airflow/providers/salesforce/CHANGELOG.rst
@@ -19,6 +19,22 @@
 Changelog
 ---------
 
+1.0.2
+.....
+
+Tableau functionality has been moved to the separate 'tableau' provider.
+
+Things done:
+
+    - Tableau classes now import from the 'tableau' provider and emit a deprecation warning
+
+
+1.0.1
+.....
+
+Updated documentation and readme files.
+
+
 1.0.0
 .....
 
diff --git a/airflow/providers/salesforce/hooks/tableau.py b/airflow/providers/salesforce/hooks/tableau.py
index 51c2f98..cf5f7f3 100644
--- a/airflow/providers/salesforce/hooks/tableau.py
+++ b/airflow/providers/salesforce/hooks/tableau.py
@@ -14,102 +14,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from enum import Enum
-from typing import Any, Optional
 
-from tableauserverclient import Pager, PersonalAccessTokenAuth, Server, TableauAuth
-from tableauserverclient.server import Auth
+import warnings
 
-from airflow.hooks.base import BaseHook
+# pylint: disable=unused-import
+from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode  # noqa
 
-
-class TableauJobFinishCode(Enum):
-    """
-    The finish code indicates the status of the job.
-
-    .. seealso:: https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref.htm#query_job
-
-    """
-
-    PENDING = -1
-    SUCCESS = 0
-    ERROR = 1
-    CANCELED = 2
-
-
-class TableauHook(BaseHook):
-    """
-    Connects to the Tableau Server Instance and allows to communicate with it.
-
-    .. seealso:: https://tableau.github.io/server-client-python/docs/
-
-    :param site_id: The id of the site where the workbook belongs to.
-        It will connect to the default site if you don't provide an id.
-    :type site_id: Optional[str]
-    :param tableau_conn_id: The Tableau Connection id containing the credentials
-        to authenticate to the Tableau Server.
-    :type tableau_conn_id: str
-    """
-
-    conn_name_attr = 'tableau_conn_id'
-    default_conn_name = 'tableau_default'
-    conn_type = 'tableau'
-    hook_name = 'Tableau'
-
-    def __init__(self, site_id: Optional[str] = None, tableau_conn_id: str = default_conn_name) -> None:
-        super().__init__()
-        self.tableau_conn_id = tableau_conn_id
-        self.conn = self.get_connection(self.tableau_conn_id)
-        self.site_id = site_id or self.conn.extra_dejson.get('site_id', '')
-        self.server = Server(self.conn.host, use_server_version=True)
-        self.tableau_conn = None
-
-    def __enter__(self):
-        if not self.tableau_conn:
-            self.tableau_conn = self.get_conn()
-        return self
-
-    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
-        self.server.auth.sign_out()
-
-    def get_conn(self) -> Auth.contextmgr:
-        """
-        Signs in to the Tableau Server and automatically signs out if used as ContextManager.
-
-        :return: an authorized Tableau Server Context Manager object.
-        :rtype: tableauserverclient.server.Auth.contextmgr
-        """
-        if self.conn.login and self.conn.password:
-            return self._auth_via_password()
-        if 'token_name' in self.conn.extra_dejson and 'personal_access_token' in self.conn.extra_dejson:
-            return self._auth_via_token()
-        raise NotImplementedError('No Authentication method found for given Credentials!')
-
-    def _auth_via_password(self) -> Auth.contextmgr:
-        tableau_auth = TableauAuth(
-            username=self.conn.login, password=self.conn.password, site_id=self.site_id
-        )
-        return self.server.auth.sign_in(tableau_auth)
-
-    def _auth_via_token(self) -> Auth.contextmgr:
-        tableau_auth = PersonalAccessTokenAuth(
-            token_name=self.conn.extra_dejson['token_name'],
-            personal_access_token=self.conn.extra_dejson['personal_access_token'],
-            site_id=self.site_id,
-        )
-        return self.server.auth.sign_in_with_personal_access_token(tableau_auth)
-
-    def get_all(self, resource_name: str) -> Pager:
-        """
-        Get all items of the given resource.
-
-        .. seealso:: https://tableau.github.io/server-client-python/docs/page-through-results
-
-        :param resource_name: The name of the resource to paginate.
-            For example: jobs or workbooks
-        :type resource_name: str
-        :return: all items by returning a Pager.
-        :rtype: tableauserverclient.Pager
-        """
-        resource = getattr(self.server, resource_name)
-        return Pager(resource.get)
+warnings.warn(
+    "This module is deprecated. Please use `airflow.providers.tableau.hooks.tableau`.",
+    DeprecationWarning,
+    stacklevel=2,
+)
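
In practice this shim means existing imports keep working but warn at import time; migrating
is a one-line change, e.g.:

.. code-block:: python

    # Old import path: still works after this change, but emits a DeprecationWarning.
    from airflow.providers.salesforce.hooks.tableau import TableauHook  # deprecated shim

    # New, preferred import path introduced by the tableau provider split.
    from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode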
diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
index 7d4ffdc..309af33 100644
--- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
+++ b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
@@ -14,84 +14,16 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import Optional
 
-from tableauserverclient import WorkbookItem
+import warnings
 
-from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
-from airflow.providers.salesforce.hooks.tableau import TableauHook
-from airflow.utils.decorators import apply_defaults
+# pylint: disable=unused-import
+from airflow.providers.tableau.operators.tableau_refresh_workbook import (  # noqa
+    TableauRefreshWorkbookOperator,
+)
 
-
-class TableauRefreshWorkbookOperator(BaseOperator):
-    """
-    Refreshes a Tableau Workbook/Extract
-
-    .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#workbooks
-
-    :param workbook_name: The name of the workbook to refresh.
-    :type workbook_name: str
-    :param site_id: The id of the site where the workbook belongs to.
-    :type site_id: Optional[str]
-    :param blocking: By default the extract refresh will be blocking means it will wait until it has finished.
-    :type blocking: bool
-    :param tableau_conn_id: The Tableau Connection id containing the credentials
-        to authenticate to the Tableau Server.
-    :type tableau_conn_id: str
-    """
-
-    @apply_defaults
-    def __init__(
-        self,
-        *,
-        workbook_name: str,
-        site_id: Optional[str] = None,
-        blocking: bool = True,
-        tableau_conn_id: str = 'tableau_default',
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        self.workbook_name = workbook_name
-        self.site_id = site_id
-        self.blocking = blocking
-        self.tableau_conn_id = tableau_conn_id
-
-    def execute(self, context: dict) -> str:
-        """
-        Executes the Tableau Extract Refresh and pushes the job id to xcom.
-
-        :param context: The task context during execution.
-        :type context: dict
-        :return: the id of the job that executes the extract refresh
-        :rtype: str
-        """
-        with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook:
-            workbook = self._get_workbook_by_name(tableau_hook)
-
-            job_id = self._refresh_workbook(tableau_hook, workbook.id)
-            if self.blocking:
-                from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor
-
-                TableauJobStatusSensor(
-                    job_id=job_id,
-                    site_id=self.site_id,
-                    tableau_conn_id=self.tableau_conn_id,
-                    task_id='wait_until_succeeded',
-                    dag=None,
-                ).execute(context={})
-                self.log.info('Workbook %s has been successfully refreshed.', self.workbook_name)
-            return job_id
-
-    def _get_workbook_by_name(self, tableau_hook: TableauHook) -> WorkbookItem:
-        for workbook in tableau_hook.get_all(resource_name='workbooks'):
-            if workbook.name == self.workbook_name:
-                self.log.info('Found matching workbook with id %s', workbook.id)
-                return workbook
-
-        raise AirflowException(f'Workbook {self.workbook_name} not found!')
-
-    def _refresh_workbook(self, tableau_hook: TableauHook, workbook_id: str) -> str:
-        job = tableau_hook.server.workbooks.refresh(workbook_id)
-        self.log.info('Refreshing Workbook %s...', self.workbook_name)
-        return job.id
+warnings.warn(
+    "This module is deprecated. Please use `airflow.providers.tableau.operators.tableau_refresh_workbook`.",
+    DeprecationWarning,
+    stacklevel=2,
+)
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/salesforce/provider.yaml
index fe739ff..c0992d8 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/salesforce/provider.yaml
@@ -22,6 +22,8 @@ description: |
     `Salesforce <https://www.salesforce.com/>`__
 
 versions:
+  - 1.0.2
+  - 1.0.1
   - 1.0.0
 
 integrations:
@@ -40,10 +42,12 @@ sensors:
       - airflow.providers.salesforce.sensors.tableau_job_status
 
 hooks:
+  - integration-name: Tableau
+    python-modules:
+      - airflow.providers.salesforce.hooks.tableau
   - integration-name: Salesforce
     python-modules:
       - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
 
 hook-class-names:
   - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/salesforce/sensors/tableau_job_status.py
index 4939203..076159e 100644
--- a/airflow/providers/salesforce/sensors/tableau_job_status.py
+++ b/airflow/providers/salesforce/sensors/tableau_job_status.py
@@ -14,63 +14,17 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import Optional
 
-from airflow.exceptions import AirflowException
-from airflow.providers.salesforce.hooks.tableau import TableauHook, TableauJobFinishCode
-from airflow.sensors.base import BaseSensorOperator
-from airflow.utils.decorators import apply_defaults
+import warnings
 
+# pylint: disable=unused-import
+from airflow.providers.tableau.sensors.tableau_job_status import (  # noqa
+    TableauJobFailedException,
+    TableauJobStatusSensor,
+)
 
-class TableauJobFailedException(AirflowException):
-    """An exception that indicates that a Job failed to complete."""
-
-
-class TableauJobStatusSensor(BaseSensorOperator):
-    """
-    Watches the status of a Tableau Server Job.
-
-    .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#jobs
-
-    :param job_id: The job to watch.
-    :type job_id: str
-    :param site_id: The id of the site where the workbook belongs to.
-    :type site_id: Optional[str]
-    :param tableau_conn_id: The Tableau Connection id containing the credentials
-        to authenticate to the Tableau Server.
-    :type tableau_conn_id: str
-    """
-
-    template_fields = ('job_id',)
-
-    @apply_defaults
-    def __init__(
-        self,
-        *,
-        job_id: str,
-        site_id: Optional[str] = None,
-        tableau_conn_id: str = 'tableau_default',
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        self.tableau_conn_id = tableau_conn_id
-        self.job_id = job_id
-        self.site_id = site_id
-
-    def poke(self, context: dict) -> bool:
-        """
-        Pokes until the job has successfully finished.
-
-        :param context: The task context during execution.
-        :type context: dict
-        :return: True if it succeeded and False if not.
-        :rtype: bool
-        """
-        with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook:
-            finish_code = TableauJobFinishCode(
-                int(tableau_hook.server.jobs.get_by_id(self.job_id).finish_code)
-            )
-            self.log.info('Current finishCode is %s (%s)', finish_code.name, finish_code.value)
-            if finish_code in [TableauJobFinishCode.ERROR, TableauJobFinishCode.CANCELED]:
-                raise TableauJobFailedException('The Tableau Refresh Workbook Job failed!')
-            return finish_code == TableauJobFinishCode.SUCCESS
+warnings.warn(
+    "This module is deprecated. Please use `airflow.providers.tableau.sensors.tableau_job_status`.",
+    DeprecationWarning,
+    stacklevel=2,
+)
diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/airflow/providers/tableau/CHANGELOG.rst
similarity index 100%
copy from airflow/providers/salesforce/CHANGELOG.rst
copy to airflow/providers/tableau/CHANGELOG.rst
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/__init__.py
index fe739ff..217e5db 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/__init__.py
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -14,36 +15,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/example_dags/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/example_dags/__init__.py
index fe739ff..13a8339 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/example_dags/__init__.py
@@ -14,36 +14,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py b/airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py
similarity index 92%
rename from airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py
rename to airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py
index 32b347c..da1cc8b 100644
--- a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py
+++ b/airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py
@@ -23,8 +23,8 @@ when the operation actually finishes. That's why we have another task that check
 from datetime import timedelta
 
 from airflow import DAG
-from airflow.providers.salesforce.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
-from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor
+from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
+from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor
 from airflow.utils.dates import days_ago
 
 DEFAULT_ARGS = {
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/hooks/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/hooks/__init__.py
index fe739ff..217e5db 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/hooks/__init__.py
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -14,36 +15,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/hooks/tableau.py b/airflow/providers/tableau/hooks/tableau.py
similarity index 100%
copy from airflow/providers/salesforce/hooks/tableau.py
copy to airflow/providers/tableau/hooks/tableau.py
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/operators/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/operators/__init__.py
index fe739ff..13a8339 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/operators/__init__.py
@@ -14,36 +14,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/tableau/operators/tableau_refresh_workbook.py
similarity index 95%
copy from airflow/providers/salesforce/operators/tableau_refresh_workbook.py
copy to airflow/providers/tableau/operators/tableau_refresh_workbook.py
index 7d4ffdc..25ca77b 100644
--- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py
+++ b/airflow/providers/tableau/operators/tableau_refresh_workbook.py
@@ -20,7 +20,7 @@ from tableauserverclient import WorkbookItem
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
-from airflow.providers.salesforce.hooks.tableau import TableauHook
+from airflow.providers.tableau.hooks.tableau import TableauHook
 from airflow.utils.decorators import apply_defaults
 
 
@@ -71,7 +71,7 @@ class TableauRefreshWorkbookOperator(BaseOperator):
 
             job_id = self._refresh_workbook(tableau_hook, workbook.id)
             if self.blocking:
-                from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor
+                from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor
 
                 TableauJobStatusSensor(
                     job_id=job_id,
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/provider.yaml
similarity index 61%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/provider.yaml
index fe739ff..e777947 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/provider.yaml
@@ -16,34 +16,34 @@
 # under the License.
 
 ---
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
+package-name: apache-airflow-providers-tableau
+name: Tableau
 description: |
-    `Salesforce <https://www.salesforce.com/>`__
+    `Tableau <https://www.tableau.com/>`__
 
 versions:
   - 1.0.0
 
 integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
+  - integration-name: Tableau
+    external-doc-url: https://www.tableau.com/
+    logo: /integration-logos/tableau/tableau.png
     tags: [service]
 
 operators:
-  - integration-name: Salesforce
+  - integration-name: Tableau
     python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
+      - airflow.providers.tableau.operators.tableau_refresh_workbook
 
 sensors:
-  - integration-name: Salesforce
+  - integration-name: Tableau
     python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
+      - airflow.providers.tableau.sensors.tableau_job_status
 
 hooks:
-  - integration-name: Salesforce
+  - integration-name: Tableau
     python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
+      - airflow.providers.tableau.hooks.tableau
 
 hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
+  - airflow.providers.tableau.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/tableau/sensors/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to airflow/providers/tableau/sensors/__init__.py
index fe739ff..13a8339 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/tableau/sensors/__init__.py
@@ -14,36 +14,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/tableau/sensors/tableau_job_status.py
similarity index 96%
copy from airflow/providers/salesforce/sensors/tableau_job_status.py
copy to airflow/providers/tableau/sensors/tableau_job_status.py
index 4939203..518e2f0 100644
--- a/airflow/providers/salesforce/sensors/tableau_job_status.py
+++ b/airflow/providers/tableau/sensors/tableau_job_status.py
@@ -17,7 +17,7 @@
 from typing import Optional
 
 from airflow.exceptions import AirflowException
-from airflow.providers.salesforce.hooks.tableau import TableauHook, TableauJobFinishCode
+from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode
 from airflow.sensors.base import BaseSensorOperator
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/docs/apache-airflow-providers-tableau/index.rst
similarity index 56%
copy from airflow/providers/salesforce/CHANGELOG.rst
copy to docs/apache-airflow-providers-tableau/index.rst
index cef7dda..47ace94 100644
--- a/airflow/providers/salesforce/CHANGELOG.rst
+++ b/docs/apache-airflow-providers-tableau/index.rst
@@ -1,3 +1,4 @@
+
  .. Licensed to the Apache Software Foundation (ASF) under one
     or more contributor license agreements.  See the NOTICE file
     distributed with this work for additional information
@@ -15,11 +16,29 @@
     specific language governing permissions and limitations
     under the License.
 
+``apache-airflow-providers-tableau``
+=======================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Guides
+
+    Connection types <connections/tableau>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/tableau/index>
 
-Changelog
----------
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
 
-1.0.0
-.....
+    Example DAGs <https://github.com/apache/airflow/tree/master/airflow/providers/tableau/example_dags>
+    PyPI Repository <https://pypi.org/project/apache-airflow-providers-tableau/>
 
-Initial version of the provider.
+.. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
diff --git a/docs/integration-logos/tableau/tableau.png b/docs/integration-logos/tableau/tableau.png
new file mode 100644
index 0000000..4ec356c
Binary files /dev/null and b/docs/integration-logos/tableau/tableau.png differ
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 71f9e34..0e89285 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -1280,6 +1280,7 @@ sync'ed
 sys
 syspath
 systemd
+tableau
 tableauserverclient
 tablefmt
 tagKey
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index 76d41e4..5eb039a 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -95,7 +95,7 @@ function discover_all_provider_packages() {
     # Columns is to force it wider, so it doesn't wrap at 80 characters
     COLUMNS=180 airflow providers list
 
-    local expected_number_of_providers=63
+    local expected_number_of_providers=64
     local actual_number_of_providers
     actual_providers=$(airflow providers list --output yaml | grep package_name)
     actual_number_of_providers=$(wc -l <<<"$actual_providers")
diff --git a/setup.py b/setup.py
index 2867b36..4ee7a5c 100644
--- a/setup.py
+++ b/setup.py
@@ -444,7 +444,7 @@ statsd = [
     'statsd>=3.3.0, <4.0',
 ]
 tableau = [
-    'tableauserverclient~=0.12',
+    'tableauserverclient',
 ]
 telegram = [
     'python-telegram-bot==13.0',
@@ -576,6 +576,7 @@ PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
     'snowflake': snowflake,
     'sqlite': [],
     'ssh': ssh,
+    'tableau': tableau,
     'telegram': telegram,
     'vertica': vertica,
     'yandex': yandex,
@@ -608,7 +609,6 @@ CORE_EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
     'rabbitmq': rabbitmq,
     'sentry': sentry,
     'statsd': statsd,
-    'tableau': tableau,
     'virtualenv': virtualenv,
 }
 
diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py
index 39ee588..9112d5e 100644
--- a/tests/core/test_providers_manager.py
+++ b/tests/core/test_providers_manager.py
@@ -81,6 +81,7 @@ ALL_PROVIDERS = [
     # 'apache-airflow-providers-snowflake',
     'apache-airflow-providers-sqlite',
     'apache-airflow-providers-ssh',
+    'apache-airflow-providers-tableau',
     'apache-airflow-providers-telegram',
     'apache-airflow-providers-vertica',
     'apache-airflow-providers-yandex',
diff --git a/airflow/providers/salesforce/provider.yaml b/tests/providers/tableau/hooks/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to tests/providers/tableau/hooks/__init__.py
index fe739ff..217e5db 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/tests/providers/tableau/hooks/__init__.py
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -14,36 +15,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/tests/providers/salesforce/hooks/test_tableau.py b/tests/providers/tableau/hooks/test_tableau.py
similarity index 81%
rename from tests/providers/salesforce/hooks/test_tableau.py
rename to tests/providers/tableau/hooks/test_tableau.py
index 130746d..66ecdf7 100644
--- a/tests/providers/salesforce/hooks/test_tableau.py
+++ b/tests/providers/tableau/hooks/test_tableau.py
@@ -19,12 +19,19 @@ import unittest
 from unittest.mock import patch
 
 from airflow import configuration, models
-from airflow.providers.salesforce.hooks.tableau import TableauHook
+from airflow.providers.tableau.hooks.tableau import TableauHook
 from airflow.utils import db
 
 
 class TestTableauHook(unittest.TestCase):
+    """
+    Test class for TableauHook
+    """
+
     def setUp(self):
+        """
+        setup
+        """
         configuration.conf.load_test_config()
 
         db.merge_conn(
@@ -46,9 +53,12 @@ class TestTableauHook(unittest.TestCase):
             )
         )
 
-    @patch('airflow.providers.salesforce.hooks.tableau.TableauAuth')
-    @patch('airflow.providers.salesforce.hooks.tableau.Server')
+    @patch('airflow.providers.tableau.hooks.tableau.TableauAuth')
+    @patch('airflow.providers.tableau.hooks.tableau.Server')
     def test_get_conn_auth_via_password_and_site_in_connection(self, mock_server, mock_tableau_auth):
+        """
+        Test get conn auth via password
+        """
         with TableauHook(tableau_conn_id='tableau_test_password') as tableau_hook:
             mock_server.assert_called_once_with(tableau_hook.conn.host, use_server_version=True)
             mock_tableau_auth.assert_called_once_with(
@@ -59,9 +69,12 @@ class TestTableauHook(unittest.TestCase):
             mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value)
         mock_server.return_value.auth.sign_out.assert_called_once_with()
 
-    @patch('airflow.providers.salesforce.hooks.tableau.PersonalAccessTokenAuth')
-    @patch('airflow.providers.salesforce.hooks.tableau.Server')
+    @patch('airflow.providers.tableau.hooks.tableau.PersonalAccessTokenAuth')
+    @patch('airflow.providers.tableau.hooks.tableau.Server')
     def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tableau_auth):
+        """
+        Test get conn auth via token
+        """
         with TableauHook(site_id='test', tableau_conn_id='tableau_test_token') as tableau_hook:
             mock_server.assert_called_once_with(tableau_hook.conn.host, use_server_version=True)
             mock_tableau_auth.assert_called_once_with(
@@ -74,10 +87,13 @@ class TestTableauHook(unittest.TestCase):
             )
         mock_server.return_value.auth.sign_out.assert_called_once_with()
 
-    @patch('airflow.providers.salesforce.hooks.tableau.TableauAuth')
-    @patch('airflow.providers.salesforce.hooks.tableau.Server')
-    @patch('airflow.providers.salesforce.hooks.tableau.Pager', return_value=[1, 2, 3])
+    @patch('airflow.providers.tableau.hooks.tableau.TableauAuth')
+    @patch('airflow.providers.tableau.hooks.tableau.Server')
+    @patch('airflow.providers.tableau.hooks.tableau.Pager', return_value=[1, 2, 3])
     def test_get_all(self, mock_pager, mock_server, mock_tableau_auth):  # pylint: disable=unused-argument
+        """
+        Test get all
+        """
         with TableauHook(tableau_conn_id='tableau_test_password') as tableau_hook:
             jobs = tableau_hook.get_all(resource_name='jobs')
             assert jobs == mock_pager.return_value
diff --git a/airflow/providers/salesforce/provider.yaml b/tests/providers/tableau/operators/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to tests/providers/tableau/operators/__init__.py
index fe739ff..13a8339 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/tests/providers/tableau/operators/__init__.py
@@ -14,36 +14,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/tests/providers/salesforce/operators/test_tableau_refresh_workbook.py b/tests/providers/tableau/operators/test_tableau_refresh_workbook.py
similarity index 80%
rename from tests/providers/salesforce/operators/test_tableau_refresh_workbook.py
rename to tests/providers/tableau/operators/test_tableau_refresh_workbook.py
index 77139c1..72377a5 100644
--- a/tests/providers/salesforce/operators/test_tableau_refresh_workbook.py
+++ b/tests/providers/tableau/operators/test_tableau_refresh_workbook.py
@@ -21,11 +21,18 @@ from unittest.mock import Mock, patch
 import pytest
 
 from airflow.exceptions import AirflowException
-from airflow.providers.salesforce.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
+from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
 
 
 class TestTableauRefreshWorkbookOperator(unittest.TestCase):
+    """
+    Test class for TableauRefreshWorkbookOperator
+    """
+
     def setUp(self):
+        """
+        setup
+        """
         self.mocked_workbooks = []
         for i in range(3):
             mock_workbook = Mock()
@@ -34,8 +41,11 @@ class TestTableauRefreshWorkbookOperator(unittest.TestCase):
             self.mocked_workbooks.append(mock_workbook)
         self.kwargs = {'site_id': 'test_site', 'task_id': 'task', 'dag': None}
 
-    @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook')
+    @patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook')
     def test_execute(self, mock_tableau_hook):
+        """
+        Test Execute
+        """
         mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks)
         mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook)
         operator = TableauRefreshWorkbookOperator(blocking=False, workbook_name='wb_2', **self.kwargs)
@@ -45,9 +55,12 @@ class TestTableauRefreshWorkbookOperator(unittest.TestCase):
         mock_tableau_hook.server.workbooks.refresh.assert_called_once_with(2)
         assert mock_tableau_hook.server.workbooks.refresh.return_value.id == job_id
 
-    @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauJobStatusSensor')
-    @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook')
+    @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauJobStatusSensor')
+    @patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook')
     def test_execute_blocking(self, mock_tableau_hook, mock_tableau_job_status_sensor):
+        """
+        Test execute blocking
+        """
         mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks)
         mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook)
         operator = TableauRefreshWorkbookOperator(workbook_name='wb_2', **self.kwargs)
@@ -64,8 +77,11 @@ class TestTableauRefreshWorkbookOperator(unittest.TestCase):
             dag=None,
         )
 
-    @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook')
+    @patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook')
     def test_execute_missing_workbook(self, mock_tableau_hook):
+        """
+        Test execute missing workbook
+        """
         mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks)
         mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook)
         operator = TableauRefreshWorkbookOperator(workbook_name='test', **self.kwargs)
diff --git a/airflow/providers/salesforce/provider.yaml b/tests/providers/tableau/sensors/__init__.py
similarity index 50%
copy from airflow/providers/salesforce/provider.yaml
copy to tests/providers/tableau/sensors/__init__.py
index fe739ff..13a8339 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/tests/providers/tableau/sensors/__init__.py
@@ -14,36 +14,3 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
----
-package-name: apache-airflow-providers-salesforce
-name: Salesforce
-description: |
-    `Salesforce <https://www.salesforce.com/>`__
-
-versions:
-  - 1.0.0
-
-integrations:
-  - integration-name: Salesforce
-    external-doc-url: https://www.salesforce.com/
-    tags: [service]
-
-operators:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.operators.tableau_refresh_workbook
-
-sensors:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.sensors.tableau_job_status
-
-hooks:
-  - integration-name: Salesforce
-    python-modules:
-      - airflow.providers.salesforce.hooks.salesforce
-      - airflow.providers.salesforce.hooks.tableau
-
-hook-class-names:
-  - airflow.providers.salesforce.hooks.tableau.TableauHook
diff --git a/tests/providers/salesforce/sensors/test_tableau_job_status.py b/tests/providers/tableau/sensors/test_tableau_job_status.py
similarity index 84%
rename from tests/providers/salesforce/sensors/test_tableau_job_status.py
rename to tests/providers/tableau/sensors/test_tableau_job_status.py
index 7f01011..ea6eeb2 100644
--- a/tests/providers/salesforce/sensors/test_tableau_job_status.py
+++ b/tests/providers/tableau/sensors/test_tableau_job_status.py
@@ -21,18 +21,25 @@ from unittest.mock import Mock, patch
 import pytest
 from parameterized import parameterized
 
-from airflow.providers.salesforce.sensors.tableau_job_status import (
+from airflow.providers.tableau.sensors.tableau_job_status import (
     TableauJobFailedException,
     TableauJobStatusSensor,
 )
 
 
 class TestTableauJobStatusSensor(unittest.TestCase):
+    """
+    Test Class for JobStatusSensor
+    """
+
     def setUp(self):
         self.kwargs = {'job_id': 'job_2', 'site_id': 'test_site', 'task_id': 'task', 'dag': None}
 
-    @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauHook')
+    @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauHook')
     def test_poke(self, mock_tableau_hook):
+        """
+        Test poke
+        """
         mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook)
         mock_get = mock_tableau_hook.server.jobs.get_by_id
         mock_get.return_value.finish_code = '0'
@@ -44,8 +51,11 @@ class TestTableauJobStatusSensor(unittest.TestCase):
         mock_get.assert_called_once_with(sensor.job_id)
 
     @parameterized.expand([('1',), ('2',)])
-    @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauHook')
+    @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauHook')
     def test_poke_failed(self, finish_code, mock_tableau_hook):
+        """
+        Test poke failed
+        """
         mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook)
         mock_get = mock_tableau_hook.server.jobs.get_by_id
         mock_get.return_value.finish_code = finish_code
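
With the modules moved into the new provider, a DAG would import from the tableau package instead of salesforce. A minimal sketch, assuming the apache-airflow-providers-tableau package is installed and that 'tableau_default', 'my_site' and 'my_workbook' are placeholder connection, site and workbook names rather than values taken from this patch:

from datetime import timedelta

from airflow import DAG
from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor
from airflow.utils.dates import days_ago

with DAG(
    dag_id='tableau_refresh_sketch',
    default_args={'owner': 'airflow'},
    schedule_interval=timedelta(days=1),
    start_date=days_ago(1),
) as dag:
    # Triggers the extract refresh and pushes the Tableau job id to XCom.
    refresh_workbook = TableauRefreshWorkbookOperator(
        task_id='refresh_workbook',
        workbook_name='my_workbook',
        site_id='my_site',
        blocking=False,
    )
    # 'job_id' is a template field on the sensor, so the id pushed to XCom
    # by the task above can be pulled here at runtime.
    wait_for_job = TableauJobStatusSensor(
        task_id='wait_for_job',
        job_id="{{ ti.xcom_pull(task_ids='refresh_workbook') }}",
        site_id='my_site',
    )
    refresh_workbook >> wait_for_job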


[airflow] 09/41: Correct PostgreSQL password in doc example code (#14256)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c8012cb5dd9a76850f23aa020ec51a036d739c58
Author: Kevin Trebing <Ke...@gmx.net>
AuthorDate: Tue Feb 16 17:54:14 2021 +0100

    Correct PostgreSQL password in doc example code (#14256)
    
    The example code block for PostgreSQL sets the password to ``airflow_user`` instead of ``airflow_pass``.
    
    (cherry picked from commit 9ea0bc700c7974fafc6d98344eb59ee14c19bcd4)
---
 docs/apache-airflow/howto/set-up-database.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst
index 0d4f578..54657db 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -147,7 +147,7 @@ In the example below, a database ``airflow_db`` and user  with username ``airflo
 .. code-block:: sql
 
    CREATE DATABASE airflow_db;
-   CREATE USER airflow_user WITH PASSWORD 'airflow_user';
+   CREATE USER airflow_user WITH PASSWORD 'airflow_pass';
    GRANT ALL PRIVILEGES ON DATABASE airflow_db TO airflow_user;
 
 You may need to update your Postgres ``pg_hba.conf`` to add the
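
Whatever URI Airflow's metadata database setting points at has to use the same password as the CREATE USER statement. A minimal, hypothetical sanity check, assuming a local PostgreSQL instance, the database and user created above, and SQLAlchemy plus psycopg2 installed:

from sqlalchemy import create_engine

# The password here must match the one in CREATE USER ('airflow_pass', not 'airflow_user').
engine = create_engine('postgresql+psycopg2://airflow_user:airflow_pass@localhost/airflow_db')
with engine.connect() as conn:
    print(conn.execute('SELECT 1').scalar())  # prints 1 if the credentials are accepted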


[airflow] 25/41: Salesforce provider requires tableau (#13593)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 01509601785524b06dd3438d057ace8db62129d6
Author: Daniel Standish <ds...@pax.com>
AuthorDate: Sun Jan 10 02:20:34 2021 -0800

    Salesforce provider requires tableau (#13593)
    
    Co-authored-by: Daniel Standish <ds...@users.noreply.github.com>
    (cherry picked from commit 46edea3411498a4c2e1d8840ba0dcd93daa1e25f)
---
 setup.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.py b/setup.py
index 628ecd1..75f5db5 100644
--- a/setup.py
+++ b/setup.py
@@ -403,6 +403,7 @@ redis = [
 ]
 salesforce = [
     'simple-salesforce>=1.0.0',
+    'tableauserverclient',
 ]
 samba = [
     'pysmbclient>=0.1.3',
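
The dependency is needed because the Salesforce provider's Tableau hook imports tableauserverclient at module import time; without it the hook cannot even be imported. A minimal illustration (assuming the provider package is installed):

# Raises ImportError if tableauserverclient is not available in the environment.
from airflow.providers.salesforce.hooks.tableau import TableauHook  # noqa: F401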


[airflow] 10/41: Fix misleading statement on sqlite (#14317)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 25e482e03bdab7ff65dc61c43333e6868a13f7ae
Author: Arun Kumar <pa...@users.noreply.github.com>
AuthorDate: Fri Feb 19 16:25:26 2021 +0530

    Fix misleading statement on sqlite (#14317)
    
    The statement
    ```
    By default, Airflow uses **SQLite**, which is *not* intended for development purposes only.
    ```
    is confusing. If `postgres/mysql` are production-worthy db backends and `sqlite`, as the default db for `airflow`, is for development purposes only, then this statement is not correct. If I'm mistaken and `sqlite` is intended for both `production` and `development` use, please ignore this PR.
    
    (cherry picked from commit 0d366c1f297ec25b5f8e01c6d72a312e2ee64d4d)
---
 docs/apache-airflow/howto/set-up-database.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst
index 54657db..153ca80 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -28,7 +28,7 @@ Choosing database backend
 -------------------------
 
 If you want to take a real test drive of Airflow, you should consider setting up a database backend to **MySQL** and **PostgresSQL**.
-By default, Airflow uses **SQLite**, which is not intended for development purposes only.
+By default, Airflow uses **SQLite**, which is intended for development purposes only.
 
 Airflow supports the following database engine versions, so make sure which version you have. Old versions may not support all SQL statements.
 


[airflow] 04/41: Fix breeze redirect on macOS (#14506)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3c39b5d446bbc6974a7b655163240de7017f33ca
Author: Vladimir Mikhaylov <38...@users.noreply.github.com>
AuthorDate: Sat Feb 27 17:15:49 2021 +0300

    Fix breeze redirect on macOS (#14506)
    
    (cherry picked from commit f9cc775adc0dff49b0e288d8f2745bf097017321)
---
 breeze | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/breeze b/breeze
index 124ed85..bc63046 100755
--- a/breeze
+++ b/breeze
@@ -24,7 +24,9 @@ AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 if [[ ${BREEZE_REDIRECT=} == "" ]]; then
     mkdir -p "${AIRFLOW_SOURCES}"/logs
     export BREEZE_REDIRECT="true"
+    set +u
     "${0}" "${@}" 2>&1 | tee "${AIRFLOW_SOURCES}"/logs/breeze.out
+    set -u
     exit
 fi
 


[airflow] 29/41: Refactor DataprocOperators to support google-cloud-dataproc 2.0 (#13256)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 30bad8181921363954996808d2d34dd1e525371c
Author: Tomek Urbaszek <tu...@gmail.com>
AuthorDate: Mon Jan 18 17:49:19 2021 +0100

    Refactor DataprocOperators to support google-cloud-dataproc 2.0 (#13256)
    
    (cherry picked from commit 309788e5e2023c598095a4ee00df417d94b6a5df)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   2 +
 airflow/providers/google/cloud/hooks/dataproc.py   | 104 ++++++++---------
 .../providers/google/cloud/operators/dataproc.py   |  30 +++--
 airflow/providers/google/cloud/sensors/dataproc.py |  12 +-
 setup.py                                           |   2 +-
 .../providers/google/cloud/hooks/test_dataproc.py  | 129 ++++++++++++---------
 .../google/cloud/operators/test_dataproc.py        |  14 ++-
 .../google/cloud/sensors/test_dataproc.py          |   8 +-
 8 files changed, 157 insertions(+), 144 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index c696e1b..16a6683 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -32,11 +32,13 @@ Details are covered in the UPDATING.md files for each library, but there are som
 | [``google-cloud-automl``](https://pypi.org/project/google-cloud-automl/) | ``>=0.4.0,<2.0.0`` | ``>=2.1.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-automl/blob/master/UPGRADING.md) |
 | [``google-cloud-bigquery-datatransfer``](https://pypi.org/project/google-cloud-bigquery-datatransfer/) | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md) |
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
+| [``google-cloud-dataproc``](https://pypi.org/project/google-cloud-dataproc/) | ``>=1.0.1,<2.0.0`` | ``>=2.2.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-dataproc/blob/master/UPGRADING.md) |
 | [``google-cloud-kms``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
 | [``google-cloud-tasks``](https://pypi.org/project/google-cloud-tasks/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-tasks/blob/master/UPGRADING.md) |
 
+
 ### The field names use the snake_case convention
 
 If your DAG uses an object from the above mentioned libraries passed by XCom, it is necessary to update the naming convention of the fields that are read. Previously, the fields used the CamelSnake convention, now the snake_case convention is used.
diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py
index 12d5941..35d4786 100644
--- a/airflow/providers/google/cloud/hooks/dataproc.py
+++ b/airflow/providers/google/cloud/hooks/dataproc.py
@@ -26,18 +26,16 @@ from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
 from google.api_core.exceptions import ServerError
 from google.api_core.retry import Retry
 from google.cloud.dataproc_v1beta2 import (  # pylint: disable=no-name-in-module
-    ClusterControllerClient,
-    JobControllerClient,
-    WorkflowTemplateServiceClient,
-)
-from google.cloud.dataproc_v1beta2.types import (  # pylint: disable=no-name-in-module
     Cluster,
-    Duration,
-    FieldMask,
+    ClusterControllerClient,
     Job,
+    JobControllerClient,
     JobStatus,
     WorkflowTemplate,
+    WorkflowTemplateServiceClient,
 )
+from google.protobuf.duration_pb2 import Duration
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
@@ -291,10 +289,12 @@ class DataprocHook(GoogleBaseHook):
 
         client = self.get_cluster_client(location=region)
         result = client.create_cluster(
-            project_id=project_id,
-            region=region,
-            cluster=cluster,
-            request_id=request_id,
+            request={
+                'project_id': project_id,
+                'region': region,
+                'cluster': cluster,
+                'request_id': request_id,
+            },
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -340,11 +340,13 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_cluster_client(location=region)
         result = client.delete_cluster(
-            project_id=project_id,
-            region=region,
-            cluster_name=cluster_name,
-            cluster_uuid=cluster_uuid,
-            request_id=request_id,
+            request={
+                'project_id': project_id,
+                'region': region,
+                'cluster_name': cluster_name,
+                'cluster_uuid': cluster_uuid,
+                'request_id': request_id,
+            },
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -382,9 +384,7 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_cluster_client(location=region)
         operation = client.diagnose_cluster(
-            project_id=project_id,
-            region=region,
-            cluster_name=cluster_name,
+            request={'project_id': project_id, 'region': region, 'cluster_name': cluster_name},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -423,9 +423,7 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_cluster_client(location=region)
         result = client.get_cluster(
-            project_id=project_id,
-            region=region,
-            cluster_name=cluster_name,
+            request={'project_id': project_id, 'region': region, 'cluster_name': cluster_name},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -467,10 +465,7 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_cluster_client(location=region)
         result = client.list_clusters(
-            project_id=project_id,
-            region=region,
-            filter_=filter_,
-            page_size=page_size,
+            request={'project_id': project_id, 'region': region, 'filter': filter_, 'page_size': page_size},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -551,13 +546,15 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_cluster_client(location=location)
         operation = client.update_cluster(
-            project_id=project_id,
-            region=location,
-            cluster_name=cluster_name,
-            cluster=cluster,
-            update_mask=update_mask,
-            graceful_decommission_timeout=graceful_decommission_timeout,
-            request_id=request_id,
+            request={
+                'project_id': project_id,
+                'region': location,
+                'cluster_name': cluster_name,
+                'cluster': cluster,
+                'update_mask': update_mask,
+                'graceful_decommission_timeout': graceful_decommission_timeout,
+                'request_id': request_id,
+            },
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -593,10 +590,11 @@ class DataprocHook(GoogleBaseHook):
         :param metadata: Additional metadata that is provided to the method.
         :type metadata: Sequence[Tuple[str, str]]
         """
+        metadata = metadata or ()
         client = self.get_template_client(location)
-        parent = client.region_path(project_id, location)
+        parent = f'projects/{project_id}/regions/{location}'
         return client.create_workflow_template(
-            parent=parent, template=template, retry=retry, timeout=timeout, metadata=metadata
+            request={'parent': parent, 'template': template}, retry=retry, timeout=timeout, metadata=metadata
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -643,13 +641,11 @@ class DataprocHook(GoogleBaseHook):
         :param metadata: Additional metadata that is provided to the method.
         :type metadata: Sequence[Tuple[str, str]]
         """
+        metadata = metadata or ()
         client = self.get_template_client(location)
-        name = client.workflow_template_path(project_id, location, template_name)
+        name = f'projects/{project_id}/regions/{location}/workflowTemplates/{template_name}'
         operation = client.instantiate_workflow_template(
-            name=name,
-            version=version,
-            parameters=parameters,
-            request_id=request_id,
+            request={'name': name, 'version': version, 'request_id': request_id, 'parameters': parameters},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -690,12 +686,11 @@ class DataprocHook(GoogleBaseHook):
         :param metadata: Additional metadata that is provided to the method.
         :type metadata: Sequence[Tuple[str, str]]
         """
+        metadata = metadata or ()
         client = self.get_template_client(location)
-        parent = client.region_path(project_id, location)
+        parent = f'projects/{project_id}/regions/{location}'
         operation = client.instantiate_inline_workflow_template(
-            parent=parent,
-            template=template,
-            request_id=request_id,
+            request={'parent': parent, 'template': template, 'request_id': request_id},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -722,19 +717,19 @@ class DataprocHook(GoogleBaseHook):
         """
         state = None
         start = time.monotonic()
-        while state not in (JobStatus.ERROR, JobStatus.DONE, JobStatus.CANCELLED):
+        while state not in (JobStatus.State.ERROR, JobStatus.State.DONE, JobStatus.State.CANCELLED):
             if timeout and start + timeout < time.monotonic():
                 raise AirflowException(f"Timeout: dataproc job {job_id} is not ready after {timeout}s")
             time.sleep(wait_time)
             try:
-                job = self.get_job(location=location, job_id=job_id, project_id=project_id)
+                job = self.get_job(project_id=project_id, location=location, job_id=job_id)
                 state = job.status.state
             except ServerError as err:
                 self.log.info("Retrying. Dataproc API returned server error when waiting for job: %s", err)
 
-        if state == JobStatus.ERROR:
+        if state == JobStatus.State.ERROR:
             raise AirflowException(f'Job failed:\n{job}')
-        if state == JobStatus.CANCELLED:
+        if state == JobStatus.State.CANCELLED:
             raise AirflowException(f'Job was cancelled:\n{job}')
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -767,9 +762,7 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_job_client(location=location)
         job = client.get_job(
-            project_id=project_id,
-            region=location,
-            job_id=job_id,
+            request={'project_id': project_id, 'region': location, 'job_id': job_id},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -812,10 +805,7 @@ class DataprocHook(GoogleBaseHook):
         """
         client = self.get_job_client(location=location)
         return client.submit_job(
-            project_id=project_id,
-            region=location,
-            job=job,
-            request_id=request_id,
+            request={'project_id': project_id, 'region': location, 'job': job, 'request_id': request_id},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
@@ -884,9 +874,7 @@ class DataprocHook(GoogleBaseHook):
         client = self.get_job_client(location=location)
 
         job = client.cancel_job(
-            project_id=project_id,
-            region=location,
-            job_id=job_id,
+            request={'project_id': project_id, 'region': location, 'job_id': job_id},
             retry=retry,
             timeout=timeout,
             metadata=metadata,
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index 3bcb0aa..731e34f 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -17,7 +17,6 @@
 # under the License.
 #
 """This module contains Google Dataproc operators."""
-# pylint: disable=C0302
 
 import inspect
 import ntpath
@@ -31,12 +30,9 @@ from typing import Dict, List, Optional, Sequence, Set, Tuple, Union
 
 from google.api_core.exceptions import AlreadyExists, NotFound
 from google.api_core.retry import Retry, exponential_sleep_generator
-from google.cloud.dataproc_v1beta2.types import (  # pylint: disable=no-name-in-module
-    Cluster,
-    Duration,
-    FieldMask,
-)
-from google.protobuf.json_format import MessageToDict
+from google.cloud.dataproc_v1beta2 import Cluster  # pylint: disable=no-name-in-module
+from google.protobuf.duration_pb2 import Duration
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -562,7 +558,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         )
 
     def _handle_error_state(self, hook: DataprocHook, cluster: Cluster) -> None:
-        if cluster.status.state != cluster.status.ERROR:
+        if cluster.status.state != cluster.status.State.ERROR:
             return
         self.log.info("Cluster is in ERROR state")
         gcs_uri = hook.diagnose_cluster(
@@ -590,7 +586,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         time_left = self.timeout
         cluster = self._get_cluster(hook)
         for time_to_sleep in exponential_sleep_generator(initial=10, maximum=120):
-            if cluster.status.state != cluster.status.CREATING:
+            if cluster.status.state != cluster.status.State.CREATING:
                 break
             if time_left < 0:
                 raise AirflowException(f"Cluster {self.cluster_name} is still CREATING state, aborting")
@@ -613,18 +609,18 @@ class DataprocCreateClusterOperator(BaseOperator):
 
         # Check if cluster is not in ERROR state
         self._handle_error_state(hook, cluster)
-        if cluster.status.state == cluster.status.CREATING:
+        if cluster.status.state == cluster.status.State.CREATING:
             # Wait for cluster to be be created
             cluster = self._wait_for_cluster_in_creating_state(hook)
             self._handle_error_state(hook, cluster)
-        elif cluster.status.state == cluster.status.DELETING:
+        elif cluster.status.state == cluster.status.State.DELETING:
             # Wait for cluster to be deleted
             self._wait_for_cluster_in_deleting_state(hook)
             # Create new cluster
             cluster = self._create_cluster(hook)
             self._handle_error_state(hook, cluster)
 
-        return MessageToDict(cluster)
+        return Cluster.to_dict(cluster)
 
 
 class DataprocScaleClusterOperator(BaseOperator):
@@ -1790,7 +1786,7 @@ class DataprocSubmitJobOperator(BaseOperator):
     :type wait_timeout: int
     """
 
-    template_fields = ('project_id', 'location', 'job', 'impersonation_chain')
+    template_fields = ('project_id', 'location', 'job', 'impersonation_chain', 'request_id')
     template_fields_renderers = {"job": "json"}
 
     @apply_defaults
@@ -1876,14 +1872,14 @@ class DataprocUpdateClusterOperator(BaseOperator):
         example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter would be
         specified as ``config.worker_config.num_instances``, and the ``PATCH`` request body would specify the
         new value. If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.dataproc_v1beta2.types.FieldMask`
-    :type update_mask: Union[Dict, google.cloud.dataproc_v1beta2.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful
         decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout
         specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and
         potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum
         allowed timeout is 1 day.
-    :type graceful_decommission_timeout: Union[Dict, google.cloud.dataproc_v1beta2.types.Duration]
+    :type graceful_decommission_timeout: Union[Dict, google.protobuf.duration_pb2.Duration]
     :param request_id: Optional. A unique id used to identify the request. If the server receives two
         ``UpdateClusterRequest`` requests with the same id, then the second request will be ignored and the
         first ``google.longrunning.Operation`` created and stored in the backend is returned.
@@ -1909,7 +1905,7 @@ class DataprocUpdateClusterOperator(BaseOperator):
     :type impersonation_chain: Union[str, Sequence[str]]
     """
 
-    template_fields = ('impersonation_chain',)
+    template_fields = ('impersonation_chain', 'cluster_name')
 
     @apply_defaults
     def __init__(  # pylint: disable=too-many-arguments
diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/airflow/providers/google/cloud/sensors/dataproc.py
index 1777a22..93656df 100644
--- a/airflow/providers/google/cloud/sensors/dataproc.py
+++ b/airflow/providers/google/cloud/sensors/dataproc.py
@@ -65,14 +65,18 @@ class DataprocJobSensor(BaseSensorOperator):
         job = hook.get_job(job_id=self.dataproc_job_id, location=self.location, project_id=self.project_id)
         state = job.status.state
 
-        if state == JobStatus.ERROR:
+        if state == JobStatus.State.ERROR:
             raise AirflowException(f'Job failed:\n{job}')
-        elif state in {JobStatus.CANCELLED, JobStatus.CANCEL_PENDING, JobStatus.CANCEL_STARTED}:
+        elif state in {
+            JobStatus.State.CANCELLED,
+            JobStatus.State.CANCEL_PENDING,
+            JobStatus.State.CANCEL_STARTED,
+        }:
             raise AirflowException(f'Job was cancelled:\n{job}')
-        elif JobStatus.DONE == state:
+        elif JobStatus.State.DONE == state:
             self.log.debug("Job %s completed successfully.", self.dataproc_job_id)
             return True
-        elif JobStatus.ATTEMPT_FAILURE == state:
+        elif JobStatus.State.ATTEMPT_FAILURE == state:
             self.log.debug("Job %s attempt has failed.", self.dataproc_job_id)
 
         self.log.info("Waiting for job %s to complete.", self.dataproc_job_id)
diff --git a/setup.py b/setup.py
index 520b059..0f40d88 100644
--- a/setup.py
+++ b/setup.py
@@ -288,7 +288,7 @@ google = [
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
     'google-cloud-datacatalog>=3.0.0,<4.0.0',
-    'google-cloud-dataproc>=1.0.1,<2.0.0',
+    'google-cloud-dataproc>=2.2.0,<3.0.0',
     'google-cloud-dlp>=0.11.0,<2.0.0',
     'google-cloud-kms>=2.0.0,<3.0.0',
     'google-cloud-language>=1.1.1,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py
index d09c91e..6842acc 100644
--- a/tests/providers/google/cloud/hooks/test_dataproc.py
+++ b/tests/providers/google/cloud/hooks/test_dataproc.py
@@ -20,7 +20,7 @@ import unittest
 from unittest import mock
 
 import pytest
-from google.cloud.dataproc_v1beta2.types import JobStatus  # pylint: disable=no-name-in-module
+from google.cloud.dataproc_v1beta2 import JobStatus  # pylint: disable=no-name-in-module
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder
@@ -43,8 +43,6 @@ CLUSTER = {
     "project_id": GCP_PROJECT,
 }
 
-PARENT = "parent"
-NAME = "name"
 BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
 DATAPROC_STRING = "airflow.providers.google.cloud.hooks.dataproc.{}"
 
@@ -113,11 +111,13 @@ class TestDataprocHook(unittest.TestCase):
         )
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.create_cluster.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            cluster=CLUSTER,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                cluster=CLUSTER,
+                request_id=None,
+            ),
             metadata=None,
-            request_id=None,
             retry=None,
             timeout=None,
         )
@@ -127,12 +127,14 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.delete_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.delete_cluster.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            cluster_name=CLUSTER_NAME,
-            cluster_uuid=None,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                cluster_name=CLUSTER_NAME,
+                cluster_uuid=None,
+                request_id=None,
+            ),
             metadata=None,
-            request_id=None,
             retry=None,
             timeout=None,
         )
@@ -142,9 +144,11 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.diagnose_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.diagnose_cluster.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            cluster_name=CLUSTER_NAME,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                cluster_name=CLUSTER_NAME,
+            ),
             metadata=None,
             retry=None,
             timeout=None,
@@ -156,9 +160,11 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.get_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.get_cluster.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            cluster_name=CLUSTER_NAME,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                cluster_name=CLUSTER_NAME,
+            ),
             metadata=None,
             retry=None,
             timeout=None,
@@ -171,10 +177,12 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.list_clusters(project_id=GCP_PROJECT, region=GCP_LOCATION, filter_=filter_)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.list_clusters.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            filter_=filter_,
-            page_size=None,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                filter=filter_,
+                page_size=None,
+            ),
             metadata=None,
             retry=None,
             timeout=None,
@@ -192,14 +200,16 @@ class TestDataprocHook(unittest.TestCase):
         )
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.update_cluster.assert_called_once_with(
-            project_id=GCP_PROJECT,
-            region=GCP_LOCATION,
-            cluster=CLUSTER,
-            cluster_name=CLUSTER_NAME,
-            update_mask=update_mask,
-            graceful_decommission_timeout=None,
+            request=dict(
+                project_id=GCP_PROJECT,
+                region=GCP_LOCATION,
+                cluster=CLUSTER,
+                cluster_name=CLUSTER_NAME,
+                update_mask=update_mask,
+                graceful_decommission_timeout=None,
+                request_id=None,
+            ),
             metadata=None,
-            request_id=None,
             retry=None,
             timeout=None,
         )
@@ -207,44 +217,45 @@ class TestDataprocHook(unittest.TestCase):
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
     def test_create_workflow_template(self, mock_client):
         template = {"test": "test"}
-        mock_client.return_value.region_path.return_value = PARENT
+        parent = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}'
         self.hook.create_workflow_template(location=GCP_LOCATION, template=template, project_id=GCP_PROJECT)
-        mock_client.return_value.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION)
         mock_client.return_value.create_workflow_template.assert_called_once_with(
-            parent=PARENT, template=template, retry=None, timeout=None, metadata=None
+            request=dict(parent=parent, template=template), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
     def test_instantiate_workflow_template(self, mock_client):
         template_name = "template_name"
-        mock_client.return_value.workflow_template_path.return_value = NAME
+        name = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}/workflowTemplates/{template_name}'
         self.hook.instantiate_workflow_template(
             location=GCP_LOCATION, template_name=template_name, project_id=GCP_PROJECT
         )
-        mock_client.return_value.workflow_template_path.assert_called_once_with(
-            GCP_PROJECT, GCP_LOCATION, template_name
-        )
         mock_client.return_value.instantiate_workflow_template.assert_called_once_with(
-            name=NAME, version=None, parameters=None, request_id=None, retry=None, timeout=None, metadata=None
+            request=dict(name=name, version=None, parameters=None, request_id=None),
+            retry=None,
+            timeout=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
     def test_instantiate_inline_workflow_template(self, mock_client):
         template = {"test": "test"}
-        mock_client.return_value.region_path.return_value = PARENT
+        parent = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}'
         self.hook.instantiate_inline_workflow_template(
             location=GCP_LOCATION, template=template, project_id=GCP_PROJECT
         )
-        mock_client.return_value.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION)
         mock_client.return_value.instantiate_inline_workflow_template.assert_called_once_with(
-            parent=PARENT, template=template, request_id=None, retry=None, timeout=None, metadata=None
+            request=dict(parent=parent, template=template, request_id=None),
+            retry=None,
+            timeout=None,
+            metadata=(),
         )
 
     @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job"))
     def test_wait_for_job(self, mock_get_job):
         mock_get_job.side_effect = [
-            mock.MagicMock(status=mock.MagicMock(state=JobStatus.RUNNING)),
-            mock.MagicMock(status=mock.MagicMock(state=JobStatus.ERROR)),
+            mock.MagicMock(status=mock.MagicMock(state=JobStatus.State.RUNNING)),
+            mock.MagicMock(status=mock.MagicMock(state=JobStatus.State.ERROR)),
         ]
         with pytest.raises(AirflowException):
             self.hook.wait_for_job(job_id=JOB_ID, location=GCP_LOCATION, project_id=GCP_PROJECT, wait_time=0)
@@ -259,9 +270,11 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.get_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.get_job.assert_called_once_with(
-            region=GCP_LOCATION,
-            job_id=JOB_ID,
-            project_id=GCP_PROJECT,
+            request=dict(
+                region=GCP_LOCATION,
+                job_id=JOB_ID,
+                project_id=GCP_PROJECT,
+            ),
             retry=None,
             timeout=None,
             metadata=None,
@@ -272,10 +285,12 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.submit_job(location=GCP_LOCATION, job=JOB, project_id=GCP_PROJECT)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.submit_job.assert_called_once_with(
-            region=GCP_LOCATION,
-            job=JOB,
-            project_id=GCP_PROJECT,
-            request_id=None,
+            request=dict(
+                region=GCP_LOCATION,
+                job=JOB,
+                project_id=GCP_PROJECT,
+                request_id=None,
+            ),
             retry=None,
             timeout=None,
             metadata=None,
@@ -297,9 +312,11 @@ class TestDataprocHook(unittest.TestCase):
         self.hook.cancel_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT)
         mock_client.assert_called_once_with(location=GCP_LOCATION)
         mock_client.return_value.cancel_job.assert_called_once_with(
-            region=GCP_LOCATION,
-            job_id=JOB_ID,
-            project_id=GCP_PROJECT,
+            request=dict(
+                region=GCP_LOCATION,
+                job_id=JOB_ID,
+                project_id=GCP_PROJECT,
+            ),
             retry=None,
             timeout=None,
             metadata=None,
@@ -311,9 +328,11 @@ class TestDataprocHook(unittest.TestCase):
             self.hook.cancel_job(job_id=JOB_ID, project_id=GCP_PROJECT)
         mock_client.assert_called_once_with(location='global')
         mock_client.return_value.cancel_job.assert_called_once_with(
-            region='global',
-            job_id=JOB_ID,
-            project_id=GCP_PROJECT,
+            request=dict(
+                region='global',
+                job_id=JOB_ID,
+                project_id=GCP_PROJECT,
+            ),
             retry=None,
             timeout=None,
             metadata=None,
diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py
index ca8f706..791e8ea 100644
--- a/tests/providers/google/cloud/operators/test_dataproc.py
+++ b/tests/providers/google/cloud/operators/test_dataproc.py
@@ -204,8 +204,9 @@ class TestDataprocClusterCreateOperator(unittest.TestCase):
         assert_warning("Default region value", warning)
         self.assertEqual(op_default_region.region, 'global')
 
+    @mock.patch(DATAPROC_PATH.format("Cluster.to_dict"))
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
-    def test_execute(self, mock_hook):
+    def test_execute(self, mock_hook, to_dict_mock):
         op = DataprocCreateClusterOperator(
             task_id=TASK_ID,
             region=GCP_LOCATION,
@@ -233,9 +234,11 @@ class TestDataprocClusterCreateOperator(unittest.TestCase):
             timeout=TIMEOUT,
             metadata=METADATA,
         )
+        to_dict_mock.assert_called_once_with(mock_hook().create_cluster().result())
 
+    @mock.patch(DATAPROC_PATH.format("Cluster.to_dict"))
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
-    def test_execute_if_cluster_exists(self, mock_hook):
+    def test_execute_if_cluster_exists(self, mock_hook, to_dict_mock):
         mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
         mock_hook.return_value.get_cluster.return_value.status.state = 0
         op = DataprocCreateClusterOperator(
@@ -273,6 +276,7 @@ class TestDataprocClusterCreateOperator(unittest.TestCase):
             timeout=TIMEOUT,
             metadata=METADATA,
         )
+        to_dict_mock.assert_called_once_with(mock_hook.return_value.get_cluster.return_value)
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_execute_if_cluster_exists_do_not_use(self, mock_hook):
@@ -300,7 +304,7 @@ class TestDataprocClusterCreateOperator(unittest.TestCase):
         mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
         cluster_status = mock_hook.return_value.get_cluster.return_value.status
         cluster_status.state = 0
-        cluster_status.ERROR = 0
+        cluster_status.State.ERROR = 0
 
         op = DataprocCreateClusterOperator(
             task_id=TASK_ID,
@@ -335,11 +339,11 @@ class TestDataprocClusterCreateOperator(unittest.TestCase):
     ):
         cluster = mock.MagicMock()
         cluster.status.state = 0
-        cluster.status.DELETING = 0
+        cluster.status.State.DELETING = 0  # pylint: disable=no-member
 
         cluster2 = mock.MagicMock()
         cluster2.status.state = 0
-        cluster2.status.ERROR = 0
+        cluster2.status.State.ERROR = 0  # pylint: disable=no-member
 
         mock_create_cluster.side_effect = [AlreadyExists("test"), cluster2]
         mock_generator.return_value = [0]
diff --git a/tests/providers/google/cloud/sensors/test_dataproc.py b/tests/providers/google/cloud/sensors/test_dataproc.py
index 1ce8eea..6f2991a 100644
--- a/tests/providers/google/cloud/sensors/test_dataproc.py
+++ b/tests/providers/google/cloud/sensors/test_dataproc.py
@@ -45,7 +45,7 @@ class TestDataprocJobSensor(unittest.TestCase):
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_done(self, mock_hook):
-        job = self.create_job(JobStatus.DONE)
+        job = self.create_job(JobStatus.State.DONE)
         job_id = "job_id"
         mock_hook.return_value.get_job.return_value = job
 
@@ -66,7 +66,7 @@ class TestDataprocJobSensor(unittest.TestCase):
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_error(self, mock_hook):
-        job = self.create_job(JobStatus.ERROR)
+        job = self.create_job(JobStatus.State.ERROR)
         job_id = "job_id"
         mock_hook.return_value.get_job.return_value = job
 
@@ -88,7 +88,7 @@ class TestDataprocJobSensor(unittest.TestCase):
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_wait(self, mock_hook):
-        job = self.create_job(JobStatus.RUNNING)
+        job = self.create_job(JobStatus.State.RUNNING)
         job_id = "job_id"
         mock_hook.return_value.get_job.return_value = job
 
@@ -109,7 +109,7 @@ class TestDataprocJobSensor(unittest.TestCase):
 
     @mock.patch(DATAPROC_PATH.format("DataprocHook"))
     def test_cancelled(self, mock_hook):
-        job = self.create_job(JobStatus.CANCELLED)
+        job = self.create_job(JobStatus.State.CANCELLED)
         job_id = "job_id"
         mock_hook.return_value.get_job.return_value = job
 

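For orientation when reading the diff above: with google-cloud-dataproc>=2.2.0 the job states move from JobStatus to the nested JobStatus.State enum, and client calls take a single request dict. The following is only a minimal sketch of how calling code checks a job state under the new layout; the job object is a placeholder and is not taken from the patch:

    from google.cloud.dataproc_v1beta2 import JobStatus

    def is_job_done(job) -> bool:
        # States now live on the nested JobStatus.State enum (dataproc >= 2.0).
        if job.status.state == JobStatus.State.ERROR:
            raise RuntimeError(f"Job failed:\n{job}")
        return job.status.state == JobStatus.State.DONE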

[airflow] 23/41: Add timeout option to gcs hook methods. (#13156)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7c5ce8b0c82f6b937bded81ceb96114f8ff6ea10
Author: Joshua Carp <jm...@gmail.com>
AuthorDate: Thu Dec 24 08:12:06 2020 -0500

    Add timeout option to gcs hook methods. (#13156)
    
    (cherry picked from commit 323084e97ddacbc5512709bf0cad8f53082d16b0)
---
 airflow/providers/google/cloud/hooks/gcs.py    | 30 ++++++++++++++++++++------
 setup.py                                       |  2 +-
 tests/providers/google/cloud/hooks/test_gcs.py | 14 ++++++------
 3 files changed, 32 insertions(+), 14 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py
index 0ca3961..72a23ea 100644
--- a/airflow/providers/google/cloud/hooks/gcs.py
+++ b/airflow/providers/google/cloud/hooks/gcs.py
@@ -40,6 +40,9 @@ from airflow.version import version
 RT = TypeVar('RT')  # pylint: disable=invalid-name
 T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
 
+# Default request timeout, matching the google-cloud-storage default (60 seconds)
+DEFAULT_TIMEOUT = 60
+
 
 def _fallback_object_url_to_object_name_and_bucket_name(
     object_url_keyword_arg_name='object_url',
@@ -257,7 +260,12 @@ class GCSHook(GoogleBaseHook):
         )
 
     def download(
-        self, object_name: str, bucket_name: Optional[str], filename: Optional[str] = None
+        self,
+        object_name: str,
+        bucket_name: Optional[str],
+        filename: Optional[str] = None,
+        chunk_size: Optional[int] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT,
     ) -> Union[str, bytes]:
         """
         Downloads a file from Google Cloud Storage.
@@ -273,16 +281,20 @@ class GCSHook(GoogleBaseHook):
         :type object_name: str
         :param filename: If set, a local file path where the file should be written to.
         :type filename: str
+        :param chunk_size: Blob chunk size.
+        :type chunk_size: int
+        :param timeout: Request timeout in seconds.
+        :type timeout: int
         """
         # TODO: future improvement check file size before downloading,
         #  to check for local space availability
 
         client = self.get_conn()
         bucket = client.bucket(bucket_name)
-        blob = bucket.blob(blob_name=object_name)
+        blob = bucket.blob(blob_name=object_name, chunk_size=chunk_size)
 
         if filename:
-            blob.download_to_filename(filename)
+            blob.download_to_filename(filename, timeout=timeout)
             self.log.info('File downloaded to %s', filename)
             return filename
         else:
@@ -359,6 +371,8 @@ class GCSHook(GoogleBaseHook):
         mime_type: Optional[str] = None,
         gzip: bool = False,
         encoding: str = 'utf-8',
+        chunk_size: Optional[int] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT,
     ) -> None:
         """
         Uploads a local file or file data as string or bytes to Google Cloud Storage.
@@ -377,10 +391,14 @@ class GCSHook(GoogleBaseHook):
         :type gzip: bool
         :param encoding: bytes encoding for file data if provided as string
         :type encoding: str
+        :param chunk_size: Blob chunk size.
+        :type chunk_size: int
+        :param timeout: Request timeout in seconds.
+        :type timeout: int
         """
         client = self.get_conn()
         bucket = client.bucket(bucket_name)
-        blob = bucket.blob(blob_name=object_name)
+        blob = bucket.blob(blob_name=object_name, chunk_size=chunk_size)
         if filename and data:
             raise ValueError(
                 "'filename' and 'data' parameter provided. Please "
@@ -398,7 +416,7 @@ class GCSHook(GoogleBaseHook):
                         shutil.copyfileobj(f_in, f_out)
                         filename = filename_gz
 
-            blob.upload_from_filename(filename=filename, content_type=mime_type)
+            blob.upload_from_filename(filename=filename, content_type=mime_type, timeout=timeout)
             if gzip:
                 os.remove(filename)
             self.log.info('File %s uploaded to %s in %s bucket', filename, object_name, bucket_name)
@@ -412,7 +430,7 @@ class GCSHook(GoogleBaseHook):
                 with gz.GzipFile(fileobj=out, mode="w") as f:
                     f.write(data)
                 data = out.getvalue()
-            blob.upload_from_string(data, content_type=mime_type)
+            blob.upload_from_string(data, content_type=mime_type, timeout=timeout)
             self.log.info('Data stream uploaded to %s in %s bucket', object_name, bucket_name)
         else:
             raise ValueError("'filename' and 'data' parameter missing. One is required to upload to gcs.")
diff --git a/setup.py b/setup.py
index ae18e57..3df9e47 100644
--- a/setup.py
+++ b/setup.py
@@ -301,7 +301,7 @@ google = [
     'google-cloud-secret-manager>=0.2.0,<2.0.0',
     'google-cloud-spanner>=1.10.0,<2.0.0',
     'google-cloud-speech>=0.36.3,<2.0.0',
-    'google-cloud-storage>=1.16,<2.0.0',
+    'google-cloud-storage>=1.30,<2.0.0',
     'google-cloud-tasks>=1.2.1,<2.0.0',
     'google-cloud-texttospeech>=0.4.0,<2.0.0',
     'google-cloud-translate>=1.5.0,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py
index dffe5ad..1ce44bb 100644
--- a/tests/providers/google/cloud/hooks/test_gcs.py
+++ b/tests/providers/google/cloud/hooks/test_gcs.py
@@ -672,7 +672,7 @@ class TestGCSHook(unittest.TestCase):
         )
 
         self.assertEqual(response, test_file)
-        download_filename_method.assert_called_once_with(test_file)
+        download_filename_method.assert_called_once_with(test_file, timeout=60)
 
     @mock.patch(GCS_STRING.format('NamedTemporaryFile'))
     @mock.patch(GCS_STRING.format('GCSHook.get_conn'))
@@ -697,7 +697,7 @@ class TestGCSHook(unittest.TestCase):
         with self.gcs_hook.provide_file(bucket_name=test_bucket, object_name=test_object) as response:
 
             self.assertEqual(test_file, response.name)
-        download_filename_method.assert_called_once_with(test_file)
+        download_filename_method.assert_called_once_with(test_file, timeout=60)
         mock_temp_file.assert_has_calls(
             [
                 mock.call(suffix='test_object'),
@@ -762,7 +762,7 @@ class TestGCSHookUpload(unittest.TestCase):
         self.gcs_hook.upload(test_bucket, test_object, filename=self.testfile.name)
 
         upload_method.assert_called_once_with(
-            filename=self.testfile.name, content_type='application/octet-stream'
+            filename=self.testfile.name, content_type='application/octet-stream', timeout=60
         )
 
     @mock.patch(GCS_STRING.format('GCSHook.get_conn'))
@@ -782,7 +782,7 @@ class TestGCSHookUpload(unittest.TestCase):
 
         self.gcs_hook.upload(test_bucket, test_object, data=self.testdata_str)
 
-        upload_method.assert_called_once_with(self.testdata_str, content_type='text/plain')
+        upload_method.assert_called_once_with(self.testdata_str, content_type='text/plain', timeout=60)
 
     @mock.patch(GCS_STRING.format('GCSHook.get_conn'))
     def test_upload_data_bytes(self, mock_service):
@@ -793,7 +793,7 @@ class TestGCSHookUpload(unittest.TestCase):
 
         self.gcs_hook.upload(test_bucket, test_object, data=self.testdata_bytes)
 
-        upload_method.assert_called_once_with(self.testdata_bytes, content_type='text/plain')
+        upload_method.assert_called_once_with(self.testdata_bytes, content_type='text/plain', timeout=60)
 
     @mock.patch(GCS_STRING.format('BytesIO'))
     @mock.patch(GCS_STRING.format('gz.GzipFile'))
@@ -812,7 +812,7 @@ class TestGCSHookUpload(unittest.TestCase):
         byte_str = bytes(self.testdata_str, encoding)
         mock_gzip.assert_called_once_with(fileobj=mock_bytes_io.return_value, mode="w")
         gzip_ctx.write.assert_called_once_with(byte_str)
-        upload_method.assert_called_once_with(data, content_type='text/plain')
+        upload_method.assert_called_once_with(data, content_type='text/plain', timeout=60)
 
     @mock.patch(GCS_STRING.format('BytesIO'))
     @mock.patch(GCS_STRING.format('gz.GzipFile'))
@@ -829,7 +829,7 @@ class TestGCSHookUpload(unittest.TestCase):
 
         mock_gzip.assert_called_once_with(fileobj=mock_bytes_io.return_value, mode="w")
         gzip_ctx.write.assert_called_once_with(self.testdata_bytes)
-        upload_method.assert_called_once_with(data, content_type='text/plain')
+        upload_method.assert_called_once_with(data, content_type='text/plain', timeout=60)
 
     @mock.patch(GCS_STRING.format('GCSHook.get_conn'))
     def test_upload_exceptions(self, mock_service):

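For reference, a minimal usage sketch of the download/upload parameters added above; the bucket, object and file names are placeholders, and the hook falls back to the default Google connection:

    from airflow.providers.google.cloud.hooks.gcs import GCSHook

    hook = GCSHook()  # uses the "google_cloud_default" connection
    hook.upload(
        bucket_name="example-bucket",   # placeholder bucket name
        object_name="data/report.csv",
        filename="/tmp/report.csv",
        chunk_size=5 * 1024 * 1024,     # optional blob chunk size in bytes
        timeout=300,                    # per-request timeout in seconds (default 60)
    )
    hook.download(
        object_name="data/report.csv",
        bucket_name="example-bucket",
        filename="/tmp/report_copy.csv",
        timeout=300,
    )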

[airflow] 20/41: Update compatibility with google-cloud-kms>=2.0 (#13124)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c2c85ddd84e69813d0729aae18d82eadba8f3740
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Dec 22 12:59:27 2020 +0100

    Update compatibility with google-cloud-kms>=2.0 (#13124)
    
    (cherry picked from commit b26b0df5b03c4cd826fd7b2dff5771d64e18e6b7)
---
 airflow/providers/google/cloud/hooks/kms.py    | 20 +++++++------
 setup.py                                       |  2 +-
 tests/providers/google/cloud/hooks/test_kms.py | 40 +++++++++++++++-----------
 3 files changed, 37 insertions(+), 25 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py
index e63c2f1..3fd1433 100644
--- a/airflow/providers/google/cloud/hooks/kms.py
+++ b/airflow/providers/google/cloud/hooks/kms.py
@@ -118,12 +118,14 @@ class CloudKMSHook(GoogleBaseHook):
         :rtype: str
         """
         response = self.get_conn().encrypt(
-            name=key_name,
-            plaintext=plaintext,
-            additional_authenticated_data=authenticated_data,
+            request={
+                'name': key_name,
+                'plaintext': plaintext,
+                'additional_authenticated_data': authenticated_data,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
         ciphertext = _b64encode(response.ciphertext)
@@ -161,12 +163,14 @@ class CloudKMSHook(GoogleBaseHook):
         :rtype: bytes
         """
         response = self.get_conn().decrypt(
-            name=key_name,
-            ciphertext=_b64decode(ciphertext),
-            additional_authenticated_data=authenticated_data,
+            request={
+                'name': key_name,
+                'ciphertext': _b64decode(ciphertext),
+                'additional_authenticated_data': authenticated_data,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
         return response.plaintext
diff --git a/setup.py b/setup.py
index 63dd6d7..1ec4f5d 100644
--- a/setup.py
+++ b/setup.py
@@ -290,7 +290,7 @@ google = [
     'google-cloud-datacatalog>=1.0.0,<2.0.0',
     'google-cloud-dataproc>=1.0.1,<2.0.0',
     'google-cloud-dlp>=0.11.0,<2.0.0',
-    'google-cloud-kms>=1.2.1,<2.0.0',
+    'google-cloud-kms>=2.0.0,<3.0.0',
     'google-cloud-language>=1.1.1,<2.0.0',
     'google-cloud-logging>=1.14.0,<2.0.0',
     'google-cloud-memcache>=0.2.0',
diff --git a/tests/providers/google/cloud/hooks/test_kms.py b/tests/providers/google/cloud/hooks/test_kms.py
index 6b87e3c..4de1dfb 100644
--- a/tests/providers/google/cloud/hooks/test_kms.py
+++ b/tests/providers/google/cloud/hooks/test_kms.py
@@ -82,12 +82,14 @@ class TestCloudKMSHook(unittest.TestCase):
         result = self.kms_hook.encrypt(TEST_KEY_ID, PLAINTEXT)
         mock_get_conn.assert_called_once_with()
         mock_get_conn.return_value.encrypt.assert_called_once_with(
-            name=TEST_KEY_ID,
-            plaintext=PLAINTEXT,
-            additional_authenticated_data=None,
+            request=dict(
+                name=TEST_KEY_ID,
+                plaintext=PLAINTEXT,
+                additional_authenticated_data=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert PLAINTEXT_b64 == result
 
@@ -97,12 +99,14 @@ class TestCloudKMSHook(unittest.TestCase):
         result = self.kms_hook.encrypt(TEST_KEY_ID, PLAINTEXT, AUTH_DATA)
         mock_get_conn.assert_called_once_with()
         mock_get_conn.return_value.encrypt.assert_called_once_with(
-            name=TEST_KEY_ID,
-            plaintext=PLAINTEXT,
-            additional_authenticated_data=AUTH_DATA,
+            request=dict(
+                name=TEST_KEY_ID,
+                plaintext=PLAINTEXT,
+                additional_authenticated_data=AUTH_DATA,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert PLAINTEXT_b64 == result
 
@@ -112,12 +116,14 @@ class TestCloudKMSHook(unittest.TestCase):
         result = self.kms_hook.decrypt(TEST_KEY_ID, CIPHERTEXT_b64)
         mock_get_conn.assert_called_once_with()
         mock_get_conn.return_value.decrypt.assert_called_once_with(
-            name=TEST_KEY_ID,
-            ciphertext=CIPHERTEXT,
-            additional_authenticated_data=None,
+            request=dict(
+                name=TEST_KEY_ID,
+                ciphertext=CIPHERTEXT,
+                additional_authenticated_data=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert PLAINTEXT == result
 
@@ -127,11 +133,13 @@ class TestCloudKMSHook(unittest.TestCase):
         result = self.kms_hook.decrypt(TEST_KEY_ID, CIPHERTEXT_b64, AUTH_DATA)
         mock_get_conn.assert_called_once_with()
         mock_get_conn.return_value.decrypt.assert_called_once_with(
-            name=TEST_KEY_ID,
-            ciphertext=CIPHERTEXT,
-            additional_authenticated_data=AUTH_DATA,
+            request=dict(
+                name=TEST_KEY_ID,
+                ciphertext=CIPHERTEXT,
+                additional_authenticated_data=AUTH_DATA,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert PLAINTEXT == result

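As a rough illustration of the hook surface exercised by the updated assertions above, an encrypt/decrypt round trip could look like the sketch below; the key resource name and payload are placeholders:

    from airflow.providers.google.cloud.hooks.kms import CloudKMSHook

    key_name = (
        "projects/example-project/locations/global/"
        "keyRings/example-keyring/cryptoKeys/example-key"
    )
    hook = CloudKMSHook()
    ciphertext = hook.encrypt(key_name=key_name, plaintext=b"secret payload")  # base64-encoded str
    plaintext = hook.decrypt(key_name=key_name, ciphertext=ciphertext)         # bytes
    assert plaintext == b"secret payload"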

[airflow] 34/41: Limits Sphinx to <3.5.0 (#14238)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 68b2d9e5cbaaa8318309bed654ead3bd7c37e696
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Feb 15 14:05:49 2021 +0100

    Limits Sphinx to <3.5.0 (#14238)
    
    Sphinx 3.5.0, released on the 14th of February, introduced a problem
    in our doc builds.
    
    It is documented in https://github.com/sphinx-doc/sphinx/issues/8880
    
    Until this problem is solved we are limiting Sphinx.
    
    (cherry picked from commit da80b69812b12377efddf5ad9763ee09f89a9f31)
---
 setup.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index a752d82..92eb113 100644
--- a/setup.py
+++ b/setup.py
@@ -245,7 +245,8 @@ datadog = [
     'datadog>=0.14.0',
 ]
 doc = [
-    'sphinx>=2.1.2',
+    # Sphinx is limited to < 3.5.0 because of https://github.com/sphinx-doc/sphinx/issues/8880
+    'sphinx>=2.1.2, <3.5.0',
     f'sphinx-airflow-theme{get_sphinx_theme_version()}',
     'sphinx-argparse>=0.1.13',
     'sphinx-autoapi==1.0.0',


[airflow] 17/41: Add Google Cloud Workflows Operators (#13366)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 3f5cc0cfd83ee0c8a5bc91ddaedc8f2ac7f10d29
Author: Tomek Urbaszek <tu...@gmail.com>
AuthorDate: Thu Jan 28 20:35:09 2021 +0100

    Add Google Cloud Workflows Operators (#13366)
    
    Add Google Cloud Workflows Operators, system test, example and sensor
    
    Co-authored-by: Tobiasz Kędzierski <to...@polidea.com>
    (cherry picked from commit 6d6588fe2b8bb5fa33e930646d963df3e0530f23)
---
 .../google/cloud/example_dags/example_workflows.py | 197 ++++++
 airflow/providers/google/cloud/hooks/workflows.py  | 401 ++++++++++++
 .../providers/google/cloud/operators/workflows.py  | 714 +++++++++++++++++++++
 .../providers/google/cloud/sensors/workflows.py    | 123 ++++
 airflow/providers/google/provider.yaml             |  14 +
 .../operators/cloud/workflows.rst                  | 185 ++++++
 setup.py                                           |   2 +
 .../providers/google/cloud/hooks/test_workflows.py | 256 ++++++++
 .../google/cloud/operators/test_workflows.py       | 383 +++++++++++
 .../cloud/operators/test_workflows_system.py       |  29 +
 .../google/cloud/sensors/test_workflows.py         | 108 ++++
 .../google/cloud/utils/gcp_authenticator.py        |   1 +
 12 files changed, 2413 insertions(+)

diff --git a/airflow/providers/google/cloud/example_dags/example_workflows.py b/airflow/providers/google/cloud/example_dags/example_workflows.py
new file mode 100644
index 0000000..0fab435
--- /dev/null
+++ b/airflow/providers/google/cloud/example_dags/example_workflows.py
@@ -0,0 +1,197 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+
+from airflow import DAG
+from airflow.providers.google.cloud.operators.workflows import (
+    WorkflowsCancelExecutionOperator,
+    WorkflowsCreateExecutionOperator,
+    WorkflowsCreateWorkflowOperator,
+    WorkflowsDeleteWorkflowOperator,
+    WorkflowsGetExecutionOperator,
+    WorkflowsGetWorkflowOperator,
+    WorkflowsListExecutionsOperator,
+    WorkflowsListWorkflowsOperator,
+    WorkflowsUpdateWorkflowOperator,
+)
+from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor
+from airflow.utils.dates import days_ago
+
+LOCATION = os.environ.get("GCP_WORKFLOWS_LOCATION", "us-central1")
+PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "an-id")
+
+WORKFLOW_ID = os.getenv("GCP_WORKFLOWS_WORKFLOW_ID", "airflow-test-workflow")
+
+# [START how_to_define_workflow]
+WORKFLOW_CONTENT = """
+- getCurrentTime:
+    call: http.get
+    args:
+        url: https://us-central1-workflowsample.cloudfunctions.net/datetime
+    result: currentTime
+- readWikipedia:
+    call: http.get
+    args:
+        url: https://en.wikipedia.org/w/api.php
+        query:
+            action: opensearch
+            search: ${currentTime.body.dayOfTheWeek}
+    result: wikiResult
+- returnResult:
+    return: ${wikiResult.body[1]}
+"""
+
+WORKFLOW = {
+    "description": "Test workflow",
+    "labels": {"airflow-version": "dev"},
+    "source_contents": WORKFLOW_CONTENT,
+}
+# [END how_to_define_workflow]
+
+EXECUTION = {"argument": ""}
+
+SLEEP_WORKFLOW_ID = os.getenv("GCP_WORKFLOWS_SLEEP_WORKFLOW_ID", "sleep_workflow")
+SLEEP_WORKFLOW_CONTENT = """
+- someSleep:
+    call: sys.sleep
+    args:
+        seconds: 120
+"""
+
+SLEEP_WORKFLOW = {
+    "description": "Test workflow",
+    "labels": {"airflow-version": "dev"},
+    "source_contents": SLEEP_WORKFLOW_CONTENT,
+}
+
+
+with DAG("example_cloud_workflows", start_date=days_ago(1), schedule_interval=None) as dag:
+    # [START how_to_create_workflow]
+    create_workflow = WorkflowsCreateWorkflowOperator(
+        task_id="create_workflow",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow=WORKFLOW,
+        workflow_id=WORKFLOW_ID,
+    )
+    # [END how_to_create_workflow]
+
+    # [START how_to_update_workflow]
+    update_workflows = WorkflowsUpdateWorkflowOperator(
+        task_id="update_workflows",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow_id=WORKFLOW_ID,
+        update_mask={"paths": ["name", "description"]},
+    )
+    # [END how_to_update_workflow]
+
+    # [START how_to_get_workflow]
+    get_workflow = WorkflowsGetWorkflowOperator(
+        task_id="get_workflow", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID
+    )
+    # [END how_to_get_workflow]
+
+    # [START how_to_list_workflows]
+    list_workflows = WorkflowsListWorkflowsOperator(
+        task_id="list_workflows",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+    )
+    # [END how_to_list_workflows]
+
+    # [START how_to_delete_workflow]
+    delete_workflow = WorkflowsDeleteWorkflowOperator(
+        task_id="delete_workflow", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID
+    )
+    # [END how_to_delete_workflow]
+
+    # [START how_to_create_execution]
+    create_execution = WorkflowsCreateExecutionOperator(
+        task_id="create_execution",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        execution=EXECUTION,
+        workflow_id=WORKFLOW_ID,
+    )
+    # [END how_to_create_execution]
+
+    # [START how_to_wait_for_execution]
+    wait_for_execution = WorkflowExecutionSensor(
+        task_id="wait_for_execution",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow_id=WORKFLOW_ID,
+        execution_id='{{ task_instance.xcom_pull("create_execution", key="execution_id") }}',
+    )
+    # [END how_to_wait_for_execution]
+
+    # [START how_to_get_execution]
+    get_execution = WorkflowsGetExecutionOperator(
+        task_id="get_execution",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow_id=WORKFLOW_ID,
+        execution_id='{{ task_instance.xcom_pull("create_execution", key="execution_id") }}',
+    )
+    # [END how_to_get_execution]
+
+    # [START how_to_list_executions]
+    list_executions = WorkflowsListExecutionsOperator(
+        task_id="list_executions", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID
+    )
+    # [END how_to_list_executions]
+
+    create_workflow_for_cancel = WorkflowsCreateWorkflowOperator(
+        task_id="create_workflow_for_cancel",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow=SLEEP_WORKFLOW,
+        workflow_id=SLEEP_WORKFLOW_ID,
+    )
+
+    create_execution_for_cancel = WorkflowsCreateExecutionOperator(
+        task_id="create_execution_for_cancel",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        execution=EXECUTION,
+        workflow_id=SLEEP_WORKFLOW_ID,
+    )
+
+    # [START how_to_cancel_execution]
+    cancel_execution = WorkflowsCancelExecutionOperator(
+        task_id="cancel_execution",
+        location=LOCATION,
+        project_id=PROJECT_ID,
+        workflow_id=SLEEP_WORKFLOW_ID,
+        execution_id='{{ task_instance.xcom_pull("create_execution_for_cancel", key="execution_id") }}',
+    )
+    # [END how_to_cancel_execution]
+
+    create_workflow >> update_workflows >> [get_workflow, list_workflows]
+    update_workflows >> [create_execution, create_execution_for_cancel]
+
+    create_execution >> wait_for_execution >> [get_execution, list_executions]
+    create_workflow_for_cancel >> create_execution_for_cancel >> cancel_execution
+
+    [cancel_execution, list_executions] >> delete_workflow
+
+
+if __name__ == '__main__':
+    dag.clear(dag_run_state=None)
+    dag.run()
diff --git a/airflow/providers/google/cloud/hooks/workflows.py b/airflow/providers/google/cloud/hooks/workflows.py
new file mode 100644
index 0000000..6c78350
--- /dev/null
+++ b/airflow/providers/google/cloud/hooks/workflows.py
@@ -0,0 +1,401 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core.operation import Operation
+from google.api_core.retry import Retry
+
+# pylint: disable=no-name-in-module
+from google.cloud.workflows.executions_v1beta import Execution, ExecutionsClient
+from google.cloud.workflows.executions_v1beta.services.executions.pagers import ListExecutionsPager
+from google.cloud.workflows_v1beta import Workflow, WorkflowsClient
+from google.cloud.workflows_v1beta.services.workflows.pagers import ListWorkflowsPager
+from google.protobuf.field_mask_pb2 import FieldMask
+
+from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
+
+# pylint: enable=no-name-in-module
+
+
+class WorkflowsHook(GoogleBaseHook):
+    """
+    Hook for Google Cloud Workflows APIs.
+
+    All the methods in the hook where project_id is used must be called with
+    keyword arguments rather than positional.
+    """
+
+    def get_workflows_client(self) -> WorkflowsClient:
+        """Returns WorkflowsClient."""
+        return WorkflowsClient(credentials=self._get_credentials(), client_info=self.client_info)
+
+    def get_executions_client(self) -> ExecutionsClient:
+        """Returns ExecutionsClient."""
+        return ExecutionsClient(credentials=self._get_credentials(), client_info=self.client_info)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def create_workflow(
+        self,
+        workflow: Dict,
+        workflow_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Operation:
+        """
+        Creates a new workflow. If a workflow with the specified name
+        already exists in the specified project and location, the
+        long-running operation will return an
+        [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error.
+
+        :param workflow: Required. Workflow to be created.
+        :type workflow: Dict
+        :param workflow_id: Required. The ID of the workflow to be created.
+        :type workflow_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_workflows_client()
+        parent = f"projects/{project_id}/locations/{location}"
+        return client.create_workflow(
+            request={"parent": parent, "workflow": workflow, "workflow_id": workflow_id},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def get_workflow(
+        self,
+        workflow_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Workflow:
+        """
+        Gets details of a single Workflow.
+
+        :param workflow_id: Required. The ID of the workflow to be retrieved.
+        :type workflow_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_workflows_client()
+        name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}"
+        return client.get_workflow(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata)
+
+    def update_workflow(
+        self,
+        workflow: Union[Dict, Workflow],
+        update_mask: Optional[FieldMask] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Operation:
+        """
+        Updates an existing workflow.
+        Running this method has no impact on already running
+        executions of the workflow. A new revision of the
+        workflow may be created as a result of a successful
+        update operation. In that case, the new revision will be
+        used in subsequent workflow executions.
+
+        :param workflow: Required. Workflow to be updated.
+        :type workflow: Dict
+        :param update_mask: List of fields to be updated. If not present,
+            the entire workflow will be updated.
+        :type update_mask: FieldMask
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_workflows_client()
+        return client.update_workflow(
+            request={"workflow": workflow, "update_mask": update_mask},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def delete_workflow(
+        self,
+        workflow_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Operation:
+        """
+        Deletes a workflow with the specified name.
+        This method also cancels and deletes all running
+        executions of the workflow.
+
+        :param workflow_id: Required. The ID of the workflow to be deleted.
+        :type workflow_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_workflows_client()
+        name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}"
+        return client.delete_workflow(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def list_workflows(
+        self,
+        location: str,
+        project_id: str,
+        filter_: Optional[str] = None,
+        order_by: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> ListWorkflowsPager:
+        """
+        Lists Workflows in a given project and location.
+        The default order is not specified.
+
+        :param filter_: Filter to restrict results to specific workflows.
+        :type filter_: str
+        :param order_by: Comma-separated list of fields that
+            specify the order of the results. Default sorting order for a field is ascending.
+            To specify descending order for a field, append a "desc" suffix.
+            If not specified, the results will be returned in an unspecified order.
+        :type order_by: str
+        :param project_id: Required. The ID of the Google Cloud project the workflows belong to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_workflows_client()
+        parent = f"projects/{project_id}/locations/{location}"
+
+        return client.list_workflows(
+            request={"parent": parent, "filter": filter_, "order_by": order_by},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def create_execution(
+        self,
+        workflow_id: str,
+        location: str,
+        project_id: str,
+        execution: Dict,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Execution:
+        """
+        Creates a new execution using the latest revision of
+        the given workflow.
+
+        :param execution: Required. Input parameters of the execution represented as a dictionary.
+        :type execution: Dict
+        :param workflow_id: Required. The ID of the workflow.
+        :type workflow_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_executions_client()
+        parent = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}"
+        return client.create_execution(
+            request={"parent": parent, "execution": execution},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def get_execution(
+        self,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Execution:
+        """
+        Returns an execution for the given ``workflow_id`` and ``execution_id``.
+
+        :param workflow_id: Required. The ID of the workflow.
+        :type workflow_id: str
+        :param execution_id: Required. The ID of the execution.
+        :type execution_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_executions_client()
+        name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}/executions/{execution_id}"
+        return client.get_execution(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def cancel_execution(
+        self,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> Execution:
+        """
+        Cancels an execution using the given ``workflow_id`` and ``execution_id``.
+
+        :param workflow_id: Required. The ID of the workflow.
+        :type workflow_id: str
+        :param execution_id: Required. The ID of the execution.
+        :type execution_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_executions_client()
+        name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}/executions/{execution_id}"
+        return client.cancel_execution(
+            request={"name": name}, retry=retry, timeout=timeout, metadata=metadata
+        )
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def list_executions(
+        self,
+        workflow_id: str,
+        location: str,
+        project_id: str,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+    ) -> ListExecutionsPager:
+        """
+        Returns a list of executions which belong to the
+        workflow with the given name. The method returns
+        executions of all workflow revisions. Returned
+        executions are ordered by their start time (newest
+        first).
+
+        :param workflow_id: Required. The ID of the workflow whose executions should be listed.
+        :type workflow_id: str
+        :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+        :type project_id: str
+        :param location: Required. The GCP region in which to handle the request.
+        :type location: str
+        :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+            retried.
+        :type retry: google.api_core.retry.Retry
+        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+            ``retry`` is specified, the timeout applies to each individual attempt.
+        :type timeout: float
+        :param metadata: Additional metadata that is provided to the method.
+        :type metadata: Sequence[Tuple[str, str]]
+        """
+        metadata = metadata or ()
+        client = self.get_executions_client()
+        parent = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}"
+        return client.list_executions(
+            request={"parent": parent}, retry=retry, timeout=timeout, metadata=metadata
+        )
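
For reference, a minimal sketch of driving these hook methods directly from Python (the connection
id, project, location and workflow id below are illustrative assumptions, not values taken from
this patch):

    from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook

    hook = WorkflowsHook(gcp_conn_id="google_cloud_default")
    # Start an execution of the latest revision of an existing workflow.
    execution = hook.create_execution(
        workflow_id="my-workflow",
        execution={},
        location="europe-west1",
        project_id="my-project",
    )
    # The execution id is the last segment of the returned resource name.
    execution_id = execution.name.split("/")[-1]
    # Fetch the execution again to inspect its current state.
    state = hook.get_execution(
        workflow_id="my-workflow",
        execution_id=execution_id,
        location="europe-west1",
        project_id="my-project",
    ).state
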
diff --git a/airflow/providers/google/cloud/operators/workflows.py b/airflow/providers/google/cloud/operators/workflows.py
new file mode 100644
index 0000000..c7fc96d
--- /dev/null
+++ b/airflow/providers/google/cloud/operators/workflows.py
@@ -0,0 +1,714 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import hashlib
+import json
+import re
+import uuid
+from datetime import datetime, timedelta
+from typing import Dict, Optional, Sequence, Tuple, Union
+
+import pytz
+from google.api_core.exceptions import AlreadyExists
+from google.api_core.retry import Retry
+
+# pylint: disable=no-name-in-module
+from google.cloud.workflows.executions_v1beta import Execution
+from google.cloud.workflows_v1beta import Workflow
+
+# pylint: enable=no-name-in-module
+from google.protobuf.field_mask_pb2 import FieldMask
+
+from airflow.models import BaseOperator
+from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
+
+
+class WorkflowsCreateWorkflowOperator(BaseOperator):
+    """
+    Creates a new workflow. If a workflow with the specified name
+    already exists in the specified project and location, the long-running
+    operation will return an
+    [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsCreateWorkflowOperator`
+
+    :param workflow: Required. Workflow to be created.
+    :type workflow: Dict
+    :param workflow_id: Required. The ID of the workflow to be created.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow", "workflow_id")
+    template_fields_renderers = {"workflow": "json"}
+
+    def __init__(
+        self,
+        *,
+        workflow: Dict,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        force_rerun: bool = False,
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow = workflow
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.force_rerun = force_rerun
+
+    def _workflow_id(self, context):
+        if self.workflow_id and not self.force_rerun:
+            # If the user provides a workflow_id, ensuring idempotency
+            # is their responsibility
+            return self.workflow_id
+
+        if self.force_rerun:
+            hash_base = str(uuid.uuid4())
+        else:
+            hash_base = json.dumps(self.workflow, sort_keys=True)
+
+        # The allowed length of workflow_id is limited, so we
+        # use a hash of the full identifying information instead
+        exec_date = context['execution_date'].isoformat()
+        base = f"airflow_{self.dag_id}_{self.task_id}_{exec_date}_{hash_base}"
+        workflow_id = hashlib.md5(base.encode()).hexdigest()
+        return re.sub(r"[:\-+.]", "_", workflow_id)
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        workflow_id = self._workflow_id(context)
+
+        self.log.info("Creating workflow")
+        try:
+            operation = hook.create_workflow(
+                workflow=self.workflow,
+                workflow_id=workflow_id,
+                location=self.location,
+                project_id=self.project_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+            workflow = operation.result()
+        except AlreadyExists:
+            workflow = hook.get_workflow(
+                workflow_id=workflow_id,
+                location=self.location,
+                project_id=self.project_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+        return Workflow.to_dict(workflow)
+
+
+class WorkflowsUpdateWorkflowOperator(BaseOperator):
+    """
+    Updates an existing workflow.
+    Running this method has no impact on already running
+    executions of the workflow. A new revision of the
+    workflow may be created as a result of a successful
+    update operation. In that case, the new revision will be
+    used for new workflow executions.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsUpdateWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be updated.
+    :type workflow_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param update_mask: List of fields to be updated. If not present,
+        the entire workflow will be updated.
+    :type update_mask: FieldMask
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("workflow_id", "update_mask")
+    template_fields_renderers = {"update_mask": "json"}
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        update_mask: Optional[FieldMask] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.update_mask = update_mask
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+        workflow = hook.get_workflow(
+            workflow_id=self.workflow_id,
+            project_id=self.project_id,
+            location=self.location,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        self.log.info("Updating workflow")
+        operation = hook.update_workflow(
+            workflow=workflow,
+            update_mask=self.update_mask,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        workflow = operation.result()
+        return Workflow.to_dict(workflow)
+
+
+class WorkflowsDeleteWorkflowOperator(BaseOperator):
+    """
+    Deletes a workflow with the specified name.
+    This method also cancels and deletes all running
+    executions of the workflow.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsDeleteWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be deleted.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Deleting workflow %s", self.workflow_id)
+        operation = hook.delete_workflow(
+            workflow_id=self.workflow_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        operation.result()
+
+
+class WorkflowsListWorkflowsOperator(BaseOperator):
+    """
+    Lists Workflows in a given project and location.
+    The default order is not specified.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsListWorkflowsOperator`
+
+    :param filter_: Filter to restrict results to specific workflows.
+    :type filter_: str
+    :param order_by: Comma-separated list of fields that
+        specify the order of the results. Default sorting order for a field is ascending.
+        To specify descending order for a field, append a "desc" suffix.
+        If not specified, the results will be returned in an unspecified order.
+    :type order_by: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "order_by", "filter_")
+
+    def __init__(
+        self,
+        *,
+        location: str,
+        project_id: Optional[str] = None,
+        filter_: Optional[str] = None,
+        order_by: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.filter_ = filter_
+        self.order_by = order_by
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Retrieving workflows")
+        workflows_iter = hook.list_workflows(
+            filter_=self.filter_,
+            order_by=self.order_by,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return [Workflow.to_dict(w) for w in workflows_iter]
+
+
+class WorkflowsGetWorkflowOperator(BaseOperator):
+    """
+    Gets details of a single Workflow.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsGetWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be retrieved.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Retrieving workflow")
+        workflow = hook.get_workflow(
+            workflow_id=self.workflow_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return Workflow.to_dict(workflow)
+
+
+class WorkflowsCreateExecutionOperator(BaseOperator):
+    """
+    Creates a new execution using the latest revision of
+    the given workflow.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsCreateExecutionOperator`
+
+    :param execution: Required. Execution to be created.
+    :type execution: Dict
+    :param workflow_id: Required. The ID of the workflow.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution")
+    template_fields_renderers = {"execution": "json"}
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution: Dict,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.execution = execution
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Creating execution")
+        execution = hook.create_execution(
+            workflow_id=self.workflow_id,
+            execution=self.execution,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        execution_id = execution.name.split("/")[-1]
+        self.xcom_push(context, key="execution_id", value=execution_id)
+        return Execution.to_dict(execution)
+
+
+class WorkflowsCancelExecutionOperator(BaseOperator):
+    """
+    Cancels an execution using the given ``workflow_id`` and ``execution_id``.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsCancelExecutionOperator`
+
+    :param workflow_id: Required. The ID of the workflow.
+    :type workflow_id: str
+    :param execution_id: Required. The ID of the execution.
+    :type execution_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.execution_id = execution_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Canceling execution %s", self.execution_id)
+        execution = hook.cancel_execution(
+            workflow_id=self.workflow_id,
+            execution_id=self.execution_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return Execution.to_dict(execution)
+
+
+class WorkflowsListExecutionsOperator(BaseOperator):
+    """
+    Returns a list of executions which belong to the
+    workflow with the given name. The method returns
+    executions of all workflow revisions. Returned
+    executions are ordered by their start time (newest
+    first).
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsListExecutionsOperator`
+
+    :param workflow_id: Required. The ID of the workflow whose executions should be listed.
+    :type workflow_id: str
+    :param start_date_filter: If passed, only executions that started after this date will be
+        returned. By default the operator returns executions from the last 60 minutes.
+    :type start_date_filter: datetime
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        start_date_filter: Optional[datetime] = None,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.start_date_filter = start_date_filter or datetime.now(tz=pytz.UTC) - timedelta(minutes=60)
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Retrieving executions for workflow %s", self.workflow_id)
+        execution_iter = hook.list_executions(
+            workflow_id=self.workflow_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+
+        return [Execution.to_dict(e) for e in execution_iter if e.start_time > self.start_date_filter]
+
+
+class WorkflowsGetExecutionOperator(BaseOperator):
+    """
+    Returns an execution for the given ``workflow_id`` and ``execution_id``.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsGetExecutionOperator`
+
+    :param workflow_id: Required. The ID of the workflow.
+    :type workflow_id: str
+    :param execution_id: Required. The ID of the execution.
+    :type execution_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.execution_id = execution_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Retrieving execution %s for workflow %s", self.execution_id, self.workflow_id)
+        execution = hook.get_execution(
+            workflow_id=self.workflow_id,
+            execution_id=self.execution_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return Execution.to_dict(execution)
diff --git a/airflow/providers/google/cloud/sensors/workflows.py b/airflow/providers/google/cloud/sensors/workflows.py
new file mode 100644
index 0000000..5950458
--- /dev/null
+++ b/airflow/providers/google/cloud/sensors/workflows.py
@@ -0,0 +1,123 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Optional, Sequence, Set, Tuple, Union
+
+from google.api_core.retry import Retry
+from google.cloud.workflows.executions_v1beta import Execution
+
+from airflow.exceptions import AirflowException
+from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
+from airflow.sensors.base import BaseSensorOperator
+
+
+class WorkflowExecutionSensor(BaseSensorOperator):
+    """
+    Checks state of an execution for the given ``workflow_id`` and ``execution_id``.
+
+    :param workflow_id: Required. The ID of the workflow.
+    :type workflow_id: str
+    :param execution_id: Required. The ID of the execution.
+    :type execution_id: str
+    :param project_id: Required. The ID of the Google Cloud project the workflow belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param success_states: Execution states to be considered successful; by default
+        only the ``SUCCEEDED`` state.
+    :type success_states: Set[Execution.State]
+    :param failure_states: Execution states to be considered failures; by default
+        the ``FAILED`` and ``CANCELLED`` states.
+    :type failure_states: Set[Execution.State]
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param request_timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type request_timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: str,
+        success_states: Optional[Set[Execution.State]] = None,
+        failure_states: Optional[Set[Execution.State]] = None,
+        retry: Optional[Retry] = None,
+        request_timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.success_states = success_states or {Execution.State.SUCCEEDED}
+        self.failure_states = failure_states or {Execution.State.FAILED, Execution.State.CANCELLED}
+        self.workflow_id = workflow_id
+        self.execution_id = execution_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.request_timeout = request_timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def poke(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Checking state of execution %s for workflow %s", self.execution_id, self.workflow_id)
+        execution: Execution = hook.get_execution(
+            workflow_id=self.workflow_id,
+            execution_id=self.execution_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.request_timeout,
+            metadata=self.metadata,
+        )
+
+        state = execution.state
+        if state in self.failure_states:
+            raise AirflowException(
+                f"Execution {self.execution_id} for workflow {self.execution_id} "
+                f"failed and is in `{state}` state",
+            )
+
+        if state in self.success_states:
+            self.log.info(
+                "Execution %s for workflow %s completed with state: %s",
+                self.execution_id,
+                self.workflow_id,
+                state,
+            )
+            return True
+
+        self.log.info(
+            "Execution %s for workflow %s does not completed yet, current state: %s",
+            self.execution_id,
+            self.workflow_id,
+            state,
+        )
+        return False
diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml
index 9961b13..690eb00 100644
--- a/airflow/providers/google/provider.yaml
+++ b/airflow/providers/google/provider.yaml
@@ -277,6 +277,11 @@ integrations:
       - /docs/apache-airflow-providers-google/operators/cloud/natural_language.rst
     logo: /integration-logos/gcp/Cloud-NLP.png
     tags: [gcp]
+  - integration-name: Google Cloud Workflows
+    external-doc-url: https://cloud.google.com/workflows/
+    how-to-guide:
+      - /docs/apache-airflow-providers-google/operators/cloud/workflows.rst
+    tags: [gcp]
 
 operators:
   - integration-name: Google Ads
@@ -377,6 +382,9 @@ operators:
   - integration-name: Google Cloud Vision
     python-modules:
       - airflow.providers.google.cloud.operators.vision
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.operators.workflows
   - integration-name: Google Cloud Firestore
     python-modules:
       - airflow.providers.google.firebase.operators.firestore
@@ -421,6 +429,9 @@ sensors:
   - integration-name: Google Cloud Pub/Sub
     python-modules:
       - airflow.providers.google.cloud.sensors.pubsub
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.sensors.workflows
   - integration-name: Google Campaign Manager
     python-modules:
       - airflow.providers.google.marketing_platform.sensors.campaign_manager
@@ -541,6 +552,9 @@ hooks:
   - integration-name: Google Cloud Vision
     python-modules:
       - airflow.providers.google.cloud.hooks.vision
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.hooks.workflows
   - integration-name: Google
     python-modules:
       - airflow.providers.google.common.hooks.base_google
diff --git a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst
new file mode 100644
index 0000000..551a7ca
--- /dev/null
+++ b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst
@@ -0,0 +1,185 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Google Cloud Workflows Operators
+================================
+
+You can use Workflows to create serverless workflows that link a series of serverless tasks together
+in an order you define. Combine the power of Google Cloud's APIs, serverless products like Cloud
+Functions and Cloud Run, and calls to external APIs to create flexible serverless applications.
+
+For more information about the service, visit the
+`Workflows product documentation <https://cloud.google.com/workflows/docs/overview>`__.
+
+.. contents::
+  :depth: 1
+  :local:
+
+Prerequisite Tasks
+------------------
+
+.. include:: /operators/_partials/prerequisite_tasks.rst
+
+
+.. _howto/operator:WorkflowsCreateWorkflowOperator:
+
+Create workflow
+===============
+
+To create a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_create_workflow]
+      :end-before: [END how_to_create_workflow]
+
+The workflow should be defined in a way similar to this example:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 0
+      :start-after: [START how_to_define_workflow]
+      :end-before: [END how_to_define_workflow]
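+
+As a rough, hypothetical illustration (the identifiers and the workflow source below are made up,
+not taken from the example DAG), the definition and the create operator could look like:
+
+.. code-block:: python
+
+    # A string holding the YAML definition of the workflow steps.
+    WORKFLOW_SOURCE = """
+    - getMessage:
+        return: "Hello from Workflows"
+    """
+
+    WORKFLOW = {
+        "description": "Demo workflow",
+        "labels": {"env": "demo"},
+        "source_contents": WORKFLOW_SOURCE,
+    }
+
+    create_workflow = WorkflowsCreateWorkflowOperator(
+        task_id="create_workflow",
+        location="europe-west1",
+        project_id="my-project",
+        workflow_id="my-workflow",
+        workflow=WORKFLOW,
+    )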
+
+For more information about authoring workflows, check the official
+`product documentation <https://cloud.google.com/workflows/docs/overview>`__.
+
+
+.. _howto/operator:WorkflowsUpdateWorkflowOperator:
+
+Update workflow
+===============
+
+To update a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsUpdateWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_update_workflow]
+      :end-before: [END how_to_update_workflow]
+
+.. _howto/operator:WorkflowsGetWorkflowOperator:
+
+Get workflow
+============
+
+To get a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_get_workflow]
+      :end-before: [END how_to_get_workflow]
+
+.. _howto/operator:WorkflowsListWorkflowsOperator:
+
+List workflows
+==============
+
+To list workflows use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListWorkflowsOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_list_workflows]
+      :end-before: [END how_to_list_workflows]
+
+.. _howto/operator:WorkflowsDeleteWorkflowOperator:
+
+Delete workflow
+===============
+
+To delete a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsDeleteWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_delete_workflow]
+      :end-before: [END how_to_delete_workflow]
+
+.. _howto/operator:WorkflowsCreateExecutionOperator:
+
+Create execution
+================
+
+To create an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateExecutionOperator`.
+This operator is not idempotent due to an API limitation.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_create_execution]
+      :end-before: [END how_to_create_execution]
+
+The create operator does not wait for the execution to complete. To wait for the execution result, use
+:class:`~airflow.providers.google.cloud.sensors.workflows.WorkflowExecutionSensor`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_wait_for_execution]
+      :end-before: [END how_to_wait_for_execution]
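+
+The create operator also pushes the new execution's id to XCom under the ``execution_id`` key, so a
+downstream sensor can reference it through templating. A hypothetical wiring (task ids and resource
+names are illustrative) could be:
+
+.. code-block:: python
+
+    wait_for_execution = WorkflowExecutionSensor(
+        task_id="wait_for_execution",
+        location="europe-west1",
+        project_id="my-project",
+        workflow_id="my-workflow",
+        # Pull the id pushed to XCom by the upstream create task.
+        execution_id="{{ task_instance.xcom_pull('create_execution', key='execution_id') }}",
+    )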
+
+.. _howto/operator:WorkflowsGetExecutionOperator:
+
+Get execution
+================
+
+To get an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetExecutionOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_get_execution]
+      :end-before: [END how_to_get_execution]
+
+.. _howto/operator:WorkflowsListExecutionsOperator:
+
+List executions
+===============
+
+To list executions use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListExecutionsOperator`.
+By default this operator returns only executions from the last 60 minutes.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_list_executions]
+      :end-before: [END how_to_list_executions]
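+
+To look further back than the default 60 minutes, pass ``start_date_filter`` explicitly; only
+executions that started after that timestamp are returned. A hypothetical example:
+
+.. code-block:: python
+
+    from datetime import datetime, timedelta
+
+    import pytz
+
+    list_executions = WorkflowsListExecutionsOperator(
+        task_id="list_executions",
+        location="europe-west1",
+        project_id="my-project",
+        workflow_id="my-workflow",
+        # Return executions that started within the last 24 hours.
+        start_date_filter=datetime.now(tz=pytz.UTC) - timedelta(hours=24),
+    )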
+
+.. _howto/operator:WorkflowsCancelExecutionOperator:
+
+Cancel execution
+================
+
+To cancel an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCancelExecutionOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+      :language: python
+      :dedent: 4
+      :start-after: [START how_to_cancel_execution]
+      :end-before: [END how_to_cancel_execution]
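+
+Putting it together, a hypothetical DAG could chain the tasks sketched above in the usual way
+(the task names are illustrative):
+
+.. code-block:: python
+
+    create_workflow >> create_execution >> wait_for_execution >> list_executions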
diff --git a/setup.py b/setup.py
index 0689bd5..7071795 100644
--- a/setup.py
+++ b/setup.py
@@ -279,6 +279,7 @@ flask_oauth = [
 google = [
     'PyOpenSSL',
     'google-ads>=4.0.0,<8.0.0',
+    'google-api-core>=1.25.1,<2.0.0',
     'google-api-python-client>=1.6.0,<2.0.0',
     'google-auth>=1.0.0,<2.0.0',
     'google-auth-httplib2>=0.0.1',
@@ -306,6 +307,7 @@ google = [
     'google-cloud-translate>=1.5.0,<2.0.0',
     'google-cloud-videointelligence>=1.7.0,<2.0.0',
     'google-cloud-vision>=0.35.2,<2.0.0',
+    'google-cloud-workflows>=0.1.0,<2.0.0',
     'grpcio-gcp>=0.2.2',
     'json-merge-patch~=0.2',
     'pandas-gbq',
diff --git a/tests/providers/google/cloud/hooks/test_workflows.py b/tests/providers/google/cloud/hooks/test_workflows.py
new file mode 100644
index 0000000..4f3d4d0
--- /dev/null
+++ b/tests/providers/google/cloud/hooks/test_workflows.py
@@ -0,0 +1,256 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest import mock
+
+from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
+
+BASE_PATH = "airflow.providers.google.cloud.hooks.workflows.{}"
+LOCATION = "europe-west1"
+WORKFLOW_ID = "workflow_id"
+EXECUTION_ID = "execution_id"
+WORKFLOW = {"aa": "bb"}
+EXECUTION = {"ccc": "ddd"}
+PROJECT_ID = "airflow-testing"
+METADATA = ()
+TIMEOUT = None
+RETRY = None
+FILTER_ = "aaaa"
+ORDER_BY = "bbb"
+UPDATE_MASK = "aaa,bbb"
+
+WORKFLOW_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}"
+WORKFLOW_NAME = f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}"
+EXECUTION_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}"
+EXECUTION_NAME = (
+    f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}/executions/{EXECUTION_ID}"
+)
+
+
+def mock_init(*args, **kwargs):
+    pass
+
+
+class TestWorkflowsHook:
+    def setup_method(self, _):
+        with mock.patch(BASE_PATH.format("GoogleBaseHook.__init__"), new=mock_init):
+            self.hook = WorkflowsHook(gcp_conn_id="test")  # pylint: disable=attribute-defined-outside-init
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook._get_credentials"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook.client_info"), new_callable=mock.PropertyMock)
+    @mock.patch(BASE_PATH.format("WorkflowsClient"))
+    def test_get_workflows_client(self, mock_client, mock_client_info, mock_get_credentials):
+        self.hook.get_workflows_client()
+        mock_client.assert_called_once_with(
+            credentials=mock_get_credentials.return_value,
+            client_info=mock_client_info.return_value,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook._get_credentials"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook.client_info"), new_callable=mock.PropertyMock)
+    @mock.patch(BASE_PATH.format("ExecutionsClient"))
+    def test_get_executions_client(self, mock_client, mock_client_info, mock_get_credentials):
+        self.hook.get_executions_client()
+        mock_client.assert_called_once_with(
+            credentials=mock_get_credentials.return_value,
+            client_info=mock_client_info.return_value,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client"))
+    def test_create_workflow(self, mock_client):
+        result = self.hook.create_workflow(
+            workflow=WORKFLOW,
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.create_workflow.return_value == result
+        mock_client.return_value.create_workflow.assert_called_once_with(
+            request=dict(workflow=WORKFLOW, workflow_id=WORKFLOW_ID, parent=WORKFLOW_PARENT),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client"))
+    def test_get_workflow(self, mock_client):
+        result = self.hook.get_workflow(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.get_workflow.return_value == result
+        mock_client.return_value.get_workflow.assert_called_once_with(
+            request=dict(name=WORKFLOW_NAME),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client"))
+    def test_update_workflow(self, mock_client):
+        result = self.hook.update_workflow(
+            workflow=WORKFLOW,
+            update_mask=UPDATE_MASK,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.update_workflow.return_value == result
+        mock_client.return_value.update_workflow.assert_called_once_with(
+            request=dict(
+                workflow=WORKFLOW,
+                update_mask=UPDATE_MASK,
+            ),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client"))
+    def test_delete_workflow(self, mock_client):
+        result = self.hook.delete_workflow(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.delete_workflow.return_value == result
+        mock_client.return_value.delete_workflow.assert_called_once_with(
+            request=dict(name=WORKFLOW_NAME),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client"))
+    def test_list_workflows(self, mock_client):
+        result = self.hook.list_workflows(
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            filter_=FILTER_,
+            order_by=ORDER_BY,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.list_workflows.return_value == result
+        mock_client.return_value.list_workflows.assert_called_once_with(
+            request=dict(
+                parent=WORKFLOW_PARENT,
+                filter=FILTER_,
+                order_by=ORDER_BY,
+            ),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client"))
+    def test_create_execution(self, mock_client):
+        result = self.hook.create_execution(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            execution=EXECUTION,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.create_execution.return_value == result
+        mock_client.return_value.create_execution.assert_called_once_with(
+            request=dict(
+                parent=EXECUTION_PARENT,
+                execution=EXECUTION,
+            ),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client"))
+    def test_get_execution(self, mock_client):
+        result = self.hook.get_execution(
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.get_execution.return_value == result
+        mock_client.return_value.get_execution.assert_called_once_with(
+            request=dict(name=EXECUTION_NAME),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client"))
+    def test_cancel_execution(self, mock_client):
+        result = self.hook.cancel_execution(
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.cancel_execution.return_value == result
+        mock_client.return_value.cancel_execution.assert_called_once_with(
+            request=dict(name=EXECUTION_NAME),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client"))
+    def test_list_execution(self, mock_client):
+        result = self.hook.list_executions(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert mock_client.return_value.list_executions.return_value == result
+        mock_client.return_value.list_executions.assert_called_once_with(
+            request=dict(parent=EXECUTION_PARENT),
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
diff --git a/tests/providers/google/cloud/operators/test_workflows.py b/tests/providers/google/cloud/operators/test_workflows.py
new file mode 100644
index 0000000..5578548
--- /dev/null
+++ b/tests/providers/google/cloud/operators/test_workflows.py
@@ -0,0 +1,383 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import datetime
+from unittest import mock
+
+import pytz
+
+from airflow.providers.google.cloud.operators.workflows import (
+    WorkflowsCancelExecutionOperator,
+    WorkflowsCreateExecutionOperator,
+    WorkflowsCreateWorkflowOperator,
+    WorkflowsDeleteWorkflowOperator,
+    WorkflowsGetExecutionOperator,
+    WorkflowsGetWorkflowOperator,
+    WorkflowsListExecutionsOperator,
+    WorkflowsListWorkflowsOperator,
+    WorkflowsUpdateWorkflowOperator,
+)
+
+BASE_PATH = "airflow.providers.google.cloud.operators.workflows.{}"
+LOCATION = "europe-west1"
+WORKFLOW_ID = "workflow_id"
+EXECUTION_ID = "execution_id"
+WORKFLOW = {"aa": "bb"}
+EXECUTION = {"ccc": "ddd"}
+PROJECT_ID = "airflow-testing"
+METADATA = None
+TIMEOUT = None
+RETRY = None
+FILTER_ = "aaaa"
+ORDER_BY = "bbb"
+UPDATE_MASK = "aaa,bbb"
+GCP_CONN_ID = "test-conn"
+IMPERSONATION_CHAIN = None
+
+
+class TestWorkflowsCreateWorkflowOperator:
+    @mock.patch(BASE_PATH.format("Workflow"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        op = WorkflowsCreateWorkflowOperator(
+            task_id="test_task",
+            workflow=WORKFLOW,
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.create_workflow.assert_called_once_with(
+            workflow=WORKFLOW,
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == mock_object.to_dict.return_value
+
+
+class TestWorkflowsUpdateWorkflowOperator:
+    @mock.patch(BASE_PATH.format("Workflow"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        op = WorkflowsUpdateWorkflowOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            update_mask=UPDATE_MASK,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.get_workflow.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        mock_hook.return_value.update_workflow.assert_called_once_with(
+            workflow=mock_hook.return_value.get_workflow.return_value,
+            update_mask=UPDATE_MASK,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == mock_object.to_dict.return_value
+
+
+class TestWorkflowsDeleteWorkflowOperator:
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(
+        self,
+        mock_hook,
+    ):
+        op = WorkflowsDeleteWorkflowOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.delete_workflow.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+
+class TestWorkflowsListWorkflowsOperator:
+    @mock.patch(BASE_PATH.format("Workflow"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        workflow_mock = mock.MagicMock()
+        workflow_mock.start_time = datetime.datetime.now(tz=pytz.UTC) + datetime.timedelta(minutes=5)
+        mock_hook.return_value.list_workflows.return_value = [workflow_mock]
+
+        op = WorkflowsListWorkflowsOperator(
+            task_id="test_task",
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            filter_=FILTER_,
+            order_by=ORDER_BY,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.list_workflows.assert_called_once_with(
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            filter_=FILTER_,
+            order_by=ORDER_BY,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == [mock_object.to_dict.return_value]
+
+
+class TestWorkflowsGetWorkflowOperator:
+    @mock.patch(BASE_PATH.format("Workflow"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        op = WorkflowsGetWorkflowOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.get_workflow.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == mock_object.to_dict.return_value
+
+
+class TestWorkflowExecutionsCreateExecutionOperator:
+    @mock.patch(BASE_PATH.format("Execution"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    @mock.patch(BASE_PATH.format("WorkflowsCreateExecutionOperator.xcom_push"))
+    def test_execute(self, mock_xcom, mock_hook, mock_object):
+        mock_hook.return_value.create_execution.return_value.name = "name/execution_id"
+        op = WorkflowsCreateExecutionOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution=EXECUTION,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.create_execution.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            execution=EXECUTION,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+        mock_xcom.assert_called_once_with({}, key="execution_id", value="execution_id")
+        assert result == mock_object.to_dict.return_value
+
+
+class TestWorkflowExecutionsCancelExecutionOperator:
+    @mock.patch(BASE_PATH.format("Execution"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        op = WorkflowsCancelExecutionOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.cancel_execution.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == mock_object.to_dict.return_value
+
+
+class TestWorkflowExecutionsListExecutionsOperator:
+    @mock.patch(BASE_PATH.format("Execution"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        execution_mock = mock.MagicMock()
+        execution_mock.start_time = datetime.datetime.now(tz=pytz.UTC) + datetime.timedelta(minutes=5)
+        mock_hook.return_value.list_executions.return_value = [execution_mock]
+
+        op = WorkflowsListExecutionsOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.list_executions.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == [mock_object.to_dict.return_value]
+
+
+class TestWorkflowExecutionsGetExecutionOperator:
+    @mock.patch(BASE_PATH.format("Execution"))
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_execute(self, mock_hook, mock_object):
+        op = WorkflowsGetExecutionOperator(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.execute({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.get_execution.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result == mock_object.to_dict.return_value
diff --git a/tests/providers/google/cloud/operators/test_workflows_system.py b/tests/providers/google/cloud/operators/test_workflows_system.py
new file mode 100644
index 0000000..0a768ed
--- /dev/null
+++ b/tests/providers/google/cloud/operators/test_workflows_system.py
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import pytest
+
+from tests.providers.google.cloud.utils.gcp_authenticator import GCP_WORKFLOWS_KEY
+from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
+
+
+@pytest.mark.system("google.cloud")
+@pytest.mark.credential_file(GCP_WORKFLOWS_KEY)
+class CloudWorkflowsExampleDagsSystemTest(GoogleSystemTest):
+    @provide_gcp_context(GCP_WORKFLOWS_KEY)
+    def test_run_example_workflow_dag(self):
+        self.run_dag('example_cloud_workflows', CLOUD_DAG_FOLDER)
diff --git a/tests/providers/google/cloud/sensors/test_workflows.py b/tests/providers/google/cloud/sensors/test_workflows.py
new file mode 100644
index 0000000..56ad958
--- /dev/null
+++ b/tests/providers/google/cloud/sensors/test_workflows.py
@@ -0,0 +1,108 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest import mock
+
+import pytest
+from google.cloud.workflows.executions_v1beta import Execution
+
+from airflow.exceptions import AirflowException
+from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor
+
+BASE_PATH = "airflow.providers.google.cloud.sensors.workflows.{}"
+LOCATION = "europe-west1"
+WORKFLOW_ID = "workflow_id"
+EXECUTION_ID = "execution_id"
+PROJECT_ID = "airflow-testing"
+METADATA = None
+TIMEOUT = None
+RETRY = None
+GCP_CONN_ID = "test-conn"
+IMPERSONATION_CHAIN = None
+
+
+class TestWorkflowExecutionSensor:
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_poke_success(self, mock_hook):
+        mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.SUCCEEDED)
+        op = WorkflowExecutionSensor(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            request_timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.poke({})
+
+        mock_hook.assert_called_once_with(
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+
+        mock_hook.return_value.get_execution.assert_called_once_with(
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            timeout=TIMEOUT,
+            metadata=METADATA,
+        )
+
+        assert result is True
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_poke_wait(self, mock_hook):
+        mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.ACTIVE)
+        op = WorkflowExecutionSensor(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            request_timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        result = op.poke({})
+
+        assert result is False
+
+    @mock.patch(BASE_PATH.format("WorkflowsHook"))
+    def test_poke_failure(self, mock_hook):
+        mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.FAILED)
+        op = WorkflowExecutionSensor(
+            task_id="test_task",
+            workflow_id=WORKFLOW_ID,
+            execution_id=EXECUTION_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+            retry=RETRY,
+            request_timeout=TIMEOUT,
+            metadata=METADATA,
+            gcp_conn_id=GCP_CONN_ID,
+            impersonation_chain=IMPERSONATION_CHAIN,
+        )
+        with pytest.raises(AirflowException):
+            op.poke({})
diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/tests/providers/google/cloud/utils/gcp_authenticator.py
index bf36ead..2fad48c 100644
--- a/tests/providers/google/cloud/utils/gcp_authenticator.py
+++ b/tests/providers/google/cloud/utils/gcp_authenticator.py
@@ -54,6 +54,7 @@ GCP_SECRET_MANAGER_KEY = 'gcp_secret_manager.json'
 GCP_SPANNER_KEY = 'gcp_spanner.json'
 GCP_STACKDDRIVER = 'gcp_stackdriver.json'
 GCP_TASKS_KEY = 'gcp_tasks.json'
+GCP_WORKFLOWS_KEY = "gcp_workflows.json"
 GMP_KEY = 'gmp.json'
 G_FIREBASE_KEY = 'g_firebase.json'
 GCP_AWS_KEY = 'gcp_aws.json'


[airflow] 38/41: Add Azure Data Factory hook (#11015)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 9ecee99ef439ba8bc7e624bea768e24445e7d841
Author: flvndh <17...@users.noreply.github.com>
AuthorDate: Fri Feb 26 17:28:21 2021 +0100

    Add Azure Data Factory hook (#11015)
    
    fixes #10995
    
    (cherry picked from commit 11d03d2f63d88a284d6aaded5f9ab6642a60561b)
---
 .../microsoft/azure/hooks/azure_data_factory.py    | 716 +++++++++++++++++++++
 airflow/providers/microsoft/azure/provider.yaml    |   8 +
 .../integration-logos/azure/Azure Data Factory.svg |   1 +
 docs/spelling_wordlist.txt                         |   1 +
 setup.py                                           |   1 +
 .../azure/hooks/test_azure_data_factory.py         | 439 +++++++++++++
 6 files changed, 1166 insertions(+)

diff --git a/airflow/providers/microsoft/azure/hooks/azure_data_factory.py b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py
new file mode 100644
index 0000000..d6c686b
--- /dev/null
+++ b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py
@@ -0,0 +1,716 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import inspect
+from functools import wraps
+from typing import Any, Callable, Optional
+
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    Dataset,
+    DatasetResource,
+    Factory,
+    LinkedService,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    Trigger,
+    TriggerResource,
+)
+from msrestazure.azure_operation import AzureOperationPoller
+
+from airflow.exceptions import AirflowException
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+def provide_targeted_factory(func: Callable) -> Callable:
+    """
+    Provide the targeted factory to the decorated function in case it isn't specified.
+
+    If ``resource_group_name`` or ``factory_name`` is not provided, it defaults to the value specified in
+    the connection extras.
+    """
+    signature = inspect.signature(func)
+
+    @wraps(func)
+    def wrapper(*args, **kwargs) -> Callable:
+        bound_args = signature.bind(*args, **kwargs)
+
+        def bind_argument(arg, default_key):
+            if arg not in bound_args.arguments:
+                self = args[0]
+                conn = self.get_connection(self.conn_id)
+                default_value = conn.extra_dejson.get(default_key)
+
+                if not default_value:
+                    raise AirflowException("Could not determine the targeted data factory.")
+
+                bound_args.arguments[arg] = default_value
+
+        bind_argument("resource_group_name", "resourceGroup")
+        bind_argument("factory_name", "factory")
+
+        return func(*bound_args.args, **bound_args.kwargs)
+
+    return wrapper
+
+
+class AzureDataFactoryHook(AzureBaseHook):  # pylint: disable=too-many-public-methods
+    """
+    A hook to interact with Azure Data Factory.
+
+    :param conn_id: The Azure Data Factory connection id.
+    """
+
+    def __init__(self, conn_id: str = "azure_data_factory_default"):
+        super().__init__(sdk_client=DataFactoryManagementClient, conn_id=conn_id)
+        self._conn: DataFactoryManagementClient = None
+
+    def get_conn(self) -> DataFactoryManagementClient:
+        if not self._conn:
+            self._conn = super().get_conn()
+
+        return self._conn
+
+    @provide_targeted_factory
+    def get_factory(
+        self, resource_group_name: Optional[str] = None, factory_name: Optional[str] = None, **config: Any
+    ) -> Factory:
+        """
+        Get the factory.
+
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The factory.
+        """
+        return self.get_conn().factories.get(resource_group_name, factory_name, **config)
+
+    def _factory_exists(self, resource_group_name, factory_name) -> bool:
+        """Return whether or not the factory already exists."""
+        factories = {
+            factory.name for factory in self.get_conn().factories.list_by_resource_group(resource_group_name)
+        }
+
+        return factory_name in factories
+
+    @provide_targeted_factory
+    def update_factory(
+        self,
+        factory: Factory,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> Factory:
+        """
+        Update the factory.
+
+        :param factory: The factory resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the factory does not exist.
+        :return: The factory.
+        """
+        if not self._factory_exists(resource_group_name, factory_name):
+            raise AirflowException(f"Factory {factory_name!r} does not exist.")
+
+        return self.get_conn().factories.create_or_update(
+            resource_group_name, factory_name, factory, **config
+        )
+
+    @provide_targeted_factory
+    def create_factory(
+        self,
+        factory: Factory,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> Factory:
+        """
+        Create the factory.
+
+        :param factory: The factory resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the factory already exists.
+        :return: The factory.
+        """
+        if self._factory_exists(resource_group_name, factory_name):
+            raise AirflowException(f"Factory {factory_name!r} already exists.")
+
+        return self.get_conn().factories.create_or_update(
+            resource_group_name, factory_name, factory, **config
+        )
+
+    @provide_targeted_factory
+    def delete_factory(
+        self, resource_group_name: Optional[str] = None, factory_name: Optional[str] = None, **config: Any
+    ) -> None:
+        """
+        Delete the factory.
+
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().factories.delete(resource_group_name, factory_name, **config)
+
+    @provide_targeted_factory
+    def get_linked_service(
+        self,
+        linked_service_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> LinkedServiceResource:
+        """
+        Get the linked service.
+
+        :param linked_service_name: The linked service name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The linked service.
+        """
+        return self.get_conn().linked_services.get(
+            resource_group_name, factory_name, linked_service_name, **config
+        )
+
+    def _linked_service_exists(self, resource_group_name, factory_name, linked_service_name) -> bool:
+        """Return whether or not the linked service already exists."""
+        linked_services = {
+            linked_service.name
+            for linked_service in self.get_conn().linked_services.list_by_factory(
+                resource_group_name, factory_name
+            )
+        }
+
+        return linked_service_name in linked_services
+
+    @provide_targeted_factory
+    def update_linked_service(
+        self,
+        linked_service_name: str,
+        linked_service: LinkedService,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> LinkedServiceResource:
+        """
+        Update the linked service.
+
+        :param linked_service_name: The linked service name.
+        :param linked_service: The linked service resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the linked service does not exist.
+        :return: The linked service.
+        """
+        if not self._linked_service_exists(resource_group_name, factory_name, linked_service_name):
+            raise AirflowException(f"Linked service {linked_service_name!r} does not exist.")
+
+        return self.get_conn().linked_services.create_or_update(
+            resource_group_name, factory_name, linked_service_name, linked_service, **config
+        )
+
+    @provide_targeted_factory
+    def create_linked_service(
+        self,
+        linked_service_name: str,
+        linked_service: LinkedService,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> LinkedServiceResource:
+        """
+        Create the linked service.
+
+        :param linked_service_name: The linked service name.
+        :param linked_service: The linked service resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the linked service already exists.
+        :return: The linked service.
+        """
+        if self._linked_service_exists(resource_group_name, factory_name, linked_service_name):
+            raise AirflowException(f"Linked service {linked_service_name!r} already exists.")
+
+        return self.get_conn().linked_services.create_or_update(
+            resource_group_name, factory_name, linked_service_name, linked_service, **config
+        )
+
+    @provide_targeted_factory
+    def delete_linked_service(
+        self,
+        linked_service_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Delete the linked service.
+
+        :param linked_service_name: The linked service name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().linked_services.delete(
+            resource_group_name, factory_name, linked_service_name, **config
+        )
+
+    @provide_targeted_factory
+    def get_dataset(
+        self,
+        dataset_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> DatasetResource:
+        """
+        Get the dataset.
+
+        :param dataset_name: The dataset name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The dataset.
+        """
+        return self.get_conn().datasets.get(resource_group_name, factory_name, dataset_name, **config)
+
+    def _dataset_exists(self, resource_group_name, factory_name, dataset_name) -> bool:
+        """Return whether or not the dataset already exists."""
+        datasets = {
+            dataset.name
+            for dataset in self.get_conn().datasets.list_by_factory(resource_group_name, factory_name)
+        }
+
+        return dataset_name in datasets
+
+    @provide_targeted_factory
+    def update_dataset(
+        self,
+        dataset_name: str,
+        dataset: Dataset,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> DatasetResource:
+        """
+        Update the dataset.
+
+        :param dataset_name: The dataset name.
+        :param dataset: The dataset resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the dataset does not exist.
+        :return: The dataset.
+        """
+        if not self._dataset_exists(resource_group_name, factory_name, dataset_name):
+            raise AirflowException(f"Dataset {dataset_name!r} does not exist.")
+
+        return self.get_conn().datasets.create_or_update(
+            resource_group_name, factory_name, dataset_name, dataset, **config
+        )
+
+    @provide_targeted_factory
+    def create_dataset(
+        self,
+        dataset_name: str,
+        dataset: Dataset,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> DatasetResource:
+        """
+        Create the dataset.
+
+        :param dataset_name: The dataset name.
+        :param dataset: The dataset resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the dataset already exists.
+        :return: The dataset.
+        """
+        if self._dataset_exists(resource_group_name, factory_name, dataset_name):
+            raise AirflowException(f"Dataset {dataset_name!r} already exists.")
+
+        return self.get_conn().datasets.create_or_update(
+            resource_group_name, factory_name, dataset_name, dataset, **config
+        )
+
+    @provide_targeted_factory
+    def delete_dataset(
+        self,
+        dataset_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Delete the dataset.
+
+        :param dataset_name: The dataset name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().datasets.delete(resource_group_name, factory_name, dataset_name, **config)
+
+    @provide_targeted_factory
+    def get_pipeline(
+        self,
+        pipeline_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> PipelineResource:
+        """
+        Get the pipeline.
+
+        :param pipeline_name: The pipeline name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The pipeline.
+        """
+        return self.get_conn().pipelines.get(resource_group_name, factory_name, pipeline_name, **config)
+
+    def _pipeline_exists(self, resource_group_name, factory_name, pipeline_name) -> bool:
+        """Return whether or not the pipeline already exists."""
+        pipelines = {
+            pipeline.name
+            for pipeline in self.get_conn().pipelines.list_by_factory(resource_group_name, factory_name)
+        }
+
+        return pipeline_name in pipelines
+
+    @provide_targeted_factory
+    def update_pipeline(
+        self,
+        pipeline_name: str,
+        pipeline: PipelineResource,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> PipelineResource:
+        """
+        Update the pipeline.
+
+        :param pipeline_name: The pipeline name.
+        :param pipeline: The pipeline resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the pipeline does not exist.
+        :return: The pipeline.
+        """
+        if not self._pipeline_exists(resource_group_name, factory_name, pipeline_name):
+            raise AirflowException(f"Pipeline {pipeline_name!r} does not exist.")
+
+        return self.get_conn().pipelines.create_or_update(
+            resource_group_name, factory_name, pipeline_name, pipeline, **config
+        )
+
+    @provide_targeted_factory
+    def create_pipeline(
+        self,
+        pipeline_name: str,
+        pipeline: PipelineResource,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> PipelineResource:
+        """
+        Create the pipeline.
+
+        :param pipeline_name: The pipeline name.
+        :param pipeline: The pipeline resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the pipeline already exists.
+        :return: The pipeline.
+        """
+        if self._pipeline_exists(resource_group_name, factory_name, pipeline_name):
+            raise AirflowException(f"Pipeline {pipeline_name!r} already exists.")
+
+        return self.get_conn().pipelines.create_or_update(
+            resource_group_name, factory_name, pipeline_name, pipeline, **config
+        )
+
+    @provide_targeted_factory
+    def delete_pipeline(
+        self,
+        pipeline_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Delete the pipeline.
+
+        :param pipeline_name: The pipeline name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().pipelines.delete(resource_group_name, factory_name, pipeline_name, **config)
+
+    @provide_targeted_factory
+    def run_pipeline(
+        self,
+        pipeline_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> CreateRunResponse:
+        """
+        Run a pipeline.
+
+        :param pipeline_name: The pipeline name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The pipeline run.
+        """
+        return self.get_conn().pipelines.create_run(
+            resource_group_name, factory_name, pipeline_name, **config
+        )
+
+    @provide_targeted_factory
+    def get_pipeline_run(
+        self,
+        run_id: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> PipelineRun:
+        """
+        Get the pipeline run.
+
+        :param run_id: The pipeline run identifier.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The pipeline run.
+        """
+        return self.get_conn().pipeline_runs.get(resource_group_name, factory_name, run_id, **config)
+
+    @provide_targeted_factory
+    def cancel_pipeline_run(
+        self,
+        run_id: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Cancel the pipeline run.
+
+        :param run_id: The pipeline run identifier.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().pipeline_runs.cancel(resource_group_name, factory_name, run_id, **config)
+
+    @provide_targeted_factory
+    def get_trigger(
+        self,
+        trigger_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> TriggerResource:
+        """
+        Get the trigger.
+
+        :param trigger_name: The trigger name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: The trigger.
+        """
+        return self.get_conn().triggers.get(resource_group_name, factory_name, trigger_name, **config)
+
+    def _trigger_exists(self, resource_group_name, factory_name, trigger_name) -> bool:
+        """Return whether or not the trigger already exists."""
+        triggers = {
+            trigger.name
+            for trigger in self.get_conn().triggers.list_by_factory(resource_group_name, factory_name)
+        }
+
+        return trigger_name in triggers
+
+    @provide_targeted_factory
+    def update_trigger(
+        self,
+        trigger_name: str,
+        trigger: Trigger,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> TriggerResource:
+        """
+        Update the trigger.
+
+        :param trigger_name: The trigger name.
+        :param trigger: The trigger resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the trigger does not exist.
+        :return: The trigger.
+        """
+        if not self._trigger_exists(resource_group_name, factory_name, trigger_name):
+            raise AirflowException(f"Trigger {trigger_name!r} does not exist.")
+
+        return self.get_conn().triggers.create_or_update(
+            resource_group_name, factory_name, trigger_name, trigger, **config
+        )
+
+    @provide_targeted_factory
+    def create_trigger(
+        self,
+        trigger_name: str,
+        trigger: Trigger,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> TriggerResource:
+        """
+        Create the trigger.
+
+        :param trigger_name: The trigger name.
+        :param trigger: The trigger resource definition.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :raise AirflowException: If the trigger already exists.
+        :return: The trigger.
+        """
+        if self._trigger_exists(resource_group_name, factory_name, trigger_name):
+            raise AirflowException(f"Trigger {trigger_name!r} already exists.")
+
+        return self.get_conn().triggers.create_or_update(
+            resource_group_name, factory_name, trigger_name, trigger, **config
+        )
+
+    @provide_targeted_factory
+    def delete_trigger(
+        self,
+        trigger_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Delete the trigger.
+
+        :param trigger_name: The trigger name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().triggers.delete(resource_group_name, factory_name, trigger_name, **config)
+
+    @provide_targeted_factory
+    def start_trigger(
+        self,
+        trigger_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> AzureOperationPoller:
+        """
+        Start the trigger.
+
+        :param trigger_name: The trigger name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: An Azure operation poller.
+        """
+        return self.get_conn().triggers.start(resource_group_name, factory_name, trigger_name, **config)
+
+    @provide_targeted_factory
+    def stop_trigger(
+        self,
+        trigger_name: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> AzureOperationPoller:
+        """
+        Stop the trigger.
+
+        :param trigger_name: The trigger name.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        :return: An Azure operation poller.
+        """
+        return self.get_conn().triggers.stop(resource_group_name, factory_name, trigger_name, **config)
+
+    @provide_targeted_factory
+    def rerun_trigger(
+        self,
+        trigger_name: str,
+        run_id: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Rerun the trigger.
+
+        :param trigger_name: The trigger name.
+        :param run_id: The trigger run identifier.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        return self.get_conn().trigger_runs.rerun(
+            resource_group_name, factory_name, trigger_name, run_id, **config
+        )
+
+    @provide_targeted_factory
+    def cancel_trigger(
+        self,
+        trigger_name: str,
+        run_id: str,
+        resource_group_name: Optional[str] = None,
+        factory_name: Optional[str] = None,
+        **config: Any,
+    ) -> None:
+        """
+        Cancel the trigger.
+
+        :param trigger_name: The trigger name.
+        :param run_id: The trigger run identifier.
+        :param resource_group_name: The resource group name.
+        :param factory_name: The factory name.
+        :param config: Extra parameters for the ADF client.
+        """
+        self.get_conn().trigger_runs.cancel(resource_group_name, factory_name, trigger_name, run_id, **config)
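The provide_targeted_factory decorator above means the resource group and factory only have to be configured once, on the connection, rather than passed to every call. A minimal sketch of the intended usage (illustrative only; it assumes a connection whose extras carry "resourceGroup" and "factory" as in the test connection further below, the pipeline name is a placeholder, and run_id/status are attributes of the Azure SDK response models rather than anything defined in this diff):

    from airflow.providers.microsoft.azure.hooks.azure_data_factory import AzureDataFactoryHook

    hook = AzureDataFactoryHook(conn_id="azure_data_factory_default")
    # resource_group_name / factory_name are omitted and resolved from the connection extras
    run = hook.run_pipeline("my_pipeline")
    pipeline_run = hook.get_pipeline_run(run.run_id)
    print(pipeline_run.status)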
diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml
index fa0d112..da7b330 100644
--- a/airflow/providers/microsoft/azure/provider.yaml
+++ b/airflow/providers/microsoft/azure/provider.yaml
@@ -54,6 +54,10 @@ integrations:
   - integration-name: Microsoft Azure FileShare
    external-doc-url: https://azure.microsoft.com/en-us/services/storage/files/
     tags: [azure]
+  - integration-name: Microsoft Azure Data Factory
+    external-doc-url: https://azure.microsoft.com/en-us/services/data-factory/
+    logo: /integration-logos/azure/Azure Data Factory.svg
+    tags: [azure]
   - integration-name: Microsoft Azure
     external-doc-url: https://azure.microsoft.com/
     tags: [azure]
@@ -113,6 +117,9 @@ hooks:
   - integration-name: Microsoft Azure Blob Storage
     python-modules:
       - airflow.providers.microsoft.azure.hooks.wasb
+  - integration-name: Microsoft Azure Data Factory
+    python-modules:
+      - airflow.providers.microsoft.azure.hooks.azure_data_factory
 
 transfers:
   - source-integration-name: Local
@@ -138,3 +145,4 @@ hook-class-names:
   - airflow.providers.microsoft.azure.hooks.azure_data_lake.AzureDataLakeHook
   - airflow.providers.microsoft.azure.hooks.azure_container_instance.AzureContainerInstanceHook
   - airflow.providers.microsoft.azure.hooks.wasb.WasbHook
+  - airflow.providers.microsoft.azure.hooks.azure_data_factory.AzureDataFactoryHook
diff --git a/docs/integration-logos/azure/Azure Data Factory.svg b/docs/integration-logos/azure/Azure Data Factory.svg
new file mode 100644
index 0000000..481d3d4
--- /dev/null
+++ b/docs/integration-logos/azure/Azure Data Factory.svg	
@@ -0,0 +1 @@
+<svg id="f9ed9690-6753-43a7-8b32-d66ac7b8a99a" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"><defs><linearGradient id="f710a364-083f-494c-9d96-89b92ee2d5a8" x1="0.5" y1="9.77" x2="9" y2="9.77" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#005ba1" /><stop offset="0.07" stop-color="#0060a9" /><stop offset="0.36" stop-color="#0071c8" /><stop offset="0.52" stop-color="#0078d4" /><stop offset="0.64" stop-color="#0074cd" /><stop offset="0.81" stop [...]
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 0e89285..238021e 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -1062,6 +1062,7 @@ png
 podName
 podSpec
 podspec
+poller
 polyfill
 postMessage
 postfix
diff --git a/setup.py b/setup.py
index 4ee7a5c..0846ec9 100644
--- a/setup.py
+++ b/setup.py
@@ -217,6 +217,7 @@ azure = [
     'azure-keyvault>=4.1.0',
     'azure-kusto-data>=0.0.43,<0.1',
     'azure-mgmt-containerinstance>=1.5.0,<2.0',
+    'azure-mgmt-datafactory>=0.13.0',
     'azure-mgmt-datalake-store>=0.5.0',
     'azure-mgmt-resource>=2.2.0',
     'azure-storage-blob>=12.7.0',
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
new file mode 100644
index 0000000..ea445ec
--- /dev/null
+++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
@@ -0,0 +1,439 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# pylint: disable=redefined-outer-name,unused-argument
+
+import json
+from unittest.mock import MagicMock, Mock
+
+import pytest
+from pytest import fixture
+
+from airflow.exceptions import AirflowException
+from airflow.models.connection import Connection
+from airflow.providers.microsoft.azure.hooks.azure_data_factory import (
+    AzureDataFactoryHook,
+    provide_targeted_factory,
+)
+from airflow.utils import db
+
+DEFAULT_RESOURCE_GROUP = "defaultResourceGroup"
+RESOURCE_GROUP = "testResourceGroup"
+
+DEFAULT_FACTORY = "defaultFactory"
+FACTORY = "testFactory"
+
+MODEL = object()
+NAME = "testName"
+ID = "testId"
+
+
+def setup_module():
+    connection = Connection(
+        conn_id="azure_data_factory_test",
+        conn_type="azure_data_factory",
+        login="clientId",
+        password="clientSecret",
+        extra=json.dumps(
+            {
+                "tenantId": "tenantId",
+                "subscriptionId": "subscriptionId",
+                "resourceGroup": DEFAULT_RESOURCE_GROUP,
+                "factory": DEFAULT_FACTORY,
+            }
+        ),
+    )
+
+    db.merge_conn(connection)
+
+
+@fixture
+def hook():
+    client = AzureDataFactoryHook(conn_id="azure_data_factory_test")
+    client._conn = MagicMock(
+        spec=[
+            "factories",
+            "linked_services",
+            "datasets",
+            "pipelines",
+            "pipeline_runs",
+            "triggers",
+            "trigger_runs",
+        ]
+    )
+
+    return client
+
+
+def parametrize(explicit_factory, implicit_factory):
+    def wrapper(func):
+        return pytest.mark.parametrize(
+            ("user_args", "sdk_args"),
+            (explicit_factory, implicit_factory),
+            ids=("explicit factory", "implicit factory"),
+        )(func)
+
+    return wrapper
+
+
+def test_provide_targeted_factory():
+    def echo(_, resource_group_name=None, factory_name=None):
+        return resource_group_name, factory_name
+
+    conn = MagicMock()
+    hook = MagicMock()
+    hook.get_connection.return_value = conn
+
+    conn.extra_dejson = {}
+    assert provide_targeted_factory(echo)(hook, RESOURCE_GROUP, FACTORY) == (RESOURCE_GROUP, FACTORY)
+
+    conn.extra_dejson = {"resourceGroup": DEFAULT_RESOURCE_GROUP, "factory": DEFAULT_FACTORY}
+    assert provide_targeted_factory(echo)(hook) == (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)
+
+    with pytest.raises(AirflowException):
+        conn.extra_dejson = {}
+        provide_targeted_factory(echo)(hook)
+
+
+@parametrize(
+    explicit_factory=((RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY)),
+    implicit_factory=((), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)),
+)
+def test_get_factory(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_factory(*user_args)
+
+    hook._conn.factories.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)),
+    implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)),
+)
+def test_create_factory(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.create_factory(*user_args)
+
+    hook._conn.factories.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)),
+    implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)),
+)
+def test_update_factory(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._factory_exists = Mock(return_value=True)
+    hook.update_factory(*user_args)
+
+    hook._conn.factories.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)),
+    implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)),
+)
+def test_update_factory_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._factory_exists = Mock(return_value=False)
+
+    with pytest.raises(AirflowException, match=r"Factory .+ does not exist"):
+        hook.update_factory(*user_args)
+
+
+@parametrize(
+    explicit_factory=((RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY)),
+    implicit_factory=((), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)),
+)
+def test_delete_factory(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.delete_factory(*user_args)
+
+    hook._conn.factories.delete.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_get_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_linked_service(*user_args)
+
+    hook._conn.linked_services.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_create_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.create_linked_service(*user_args)
+
+    hook._conn.linked_services.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._linked_service_exists = Mock(return_value=True)
+    hook.update_linked_service(*user_args)
+
+    hook._conn.linked_services.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_linked_service_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._linked_service_exists = Mock(return_value=False)
+
+    with pytest.raises(AirflowException, match=r"Linked service .+ does not exist"):
+        hook.update_linked_service(*user_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_delete_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.delete_linked_service(*user_args)
+
+    hook._conn.linked_services.delete.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_get_dataset(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_dataset(*user_args)
+
+    hook._conn.datasets.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_create_dataset(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.create_dataset(*user_args)
+
+    hook._conn.datasets.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_dataset(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._dataset_exists = Mock(return_value=True)
+    hook.update_dataset(*user_args)
+
+    hook._conn.datasets.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_dataset_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._dataset_exists = Mock(return_value=False)
+
+    with pytest.raises(AirflowException, match=r"Dataset .+ does not exist"):
+        hook.update_dataset(*user_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_delete_dataset(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.delete_dataset(*user_args)
+
+    hook._conn.datasets.delete.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_get_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_pipeline(*user_args)
+
+    hook._conn.pipelines.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_create_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.create_pipeline(*user_args)
+
+    hook._conn.pipelines.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._pipeline_exists = Mock(return_value=True)
+    hook.update_pipeline(*user_args)
+
+    hook._conn.pipelines.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_pipeline_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._pipeline_exists = Mock(return_value=False)
+
+    with pytest.raises(AirflowException, match=r"Pipeline .+ does not exist"):
+        hook.update_pipeline(*user_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_delete_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.delete_pipeline(*user_args)
+
+    hook._conn.pipelines.delete.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_run_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.run_pipeline(*user_args)
+
+    hook._conn.pipelines.create_run.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, ID)),
+    implicit_factory=((ID,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, ID)),
+)
+def test_get_pipeline_run(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_pipeline_run(*user_args)
+
+    hook._conn.pipeline_runs.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, ID)),
+    implicit_factory=((ID,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, ID)),
+)
+def test_cancel_pipeline_run(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.cancel_pipeline_run(*user_args)
+
+    hook._conn.pipeline_runs.cancel.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_get_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.get_trigger(*user_args)
+
+    hook._conn.triggers.get.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_create_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.create_trigger(*user_args)
+
+    hook._conn.triggers.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._trigger_exists = Mock(return_value=True)
+    hook.update_trigger(*user_args)
+
+    hook._conn.triggers.create_or_update.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)),
+    implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)),
+)
+def test_update_trigger_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook._trigger_exists = Mock(return_value=False)
+
+    with pytest.raises(AirflowException, match=r"Trigger .+ does not exist"):
+        hook.update_trigger(*user_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_delete_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.delete_trigger(*user_args)
+
+    hook._conn.triggers.delete.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_start_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.start_trigger(*user_args)
+
+    hook._conn.triggers.start.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)),
+    implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
+)
+def test_stop_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.stop_trigger(*user_args)
+
+    hook._conn.triggers.stop.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, ID)),
+    implicit_factory=((NAME, ID), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, ID)),
+)
+def test_rerun_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.rerun_trigger(*user_args)
+
+    hook._conn.trigger_runs.rerun.assert_called_with(*sdk_args)
+
+
+@parametrize(
+    explicit_factory=((NAME, ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, ID)),
+    implicit_factory=((NAME, ID), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, ID)),
+)
+def test_cancel_trigger(hook: AzureDataFactoryHook, user_args, sdk_args):
+    hook.cancel_trigger(*user_args)
+
+    hook._conn.trigger_runs.cancel.assert_called_with(*sdk_args)
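
(Editorial note.) The tests above rely on a small `parametrize` helper defined earlier in this test module and not shown in this excerpt. A minimal sketch of how such a wrapper could look, assuming it simply forwards the two named cases to `pytest.mark.parametrize`:

```python
import pytest


def parametrize(explicit_factory, implicit_factory):
    """Hypothetical sketch: run each test once per named case with (user_args, sdk_args)."""
    return pytest.mark.parametrize(
        ("user_args", "sdk_args"),
        [explicit_factory, implicit_factory],
        ids=["explicit_factory", "implicit_factory"],
    )
```

With a wrapper like this, every test runs twice: once with the resource group and factory passed explicitly, and once relying on the hook's defaults (`DEFAULT_RESOURCE_GROUP`, `DEFAULT_FACTORY`).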


[airflow] 32/41: Update to Pytest 6.0 (#14065)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit df99938edfbf0b2804de4cef16a95364dd682f2a
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Thu Feb 4 12:57:51 2021 +0000

    Update to Pytest 6.0 (#14065)
    
    And pytest 6 removed a class that the rerunfailures plugin was using, so
    we have to upgrade that too.
    
    (cherry picked from commit 10c026cb7a7189d9573f30f2f2242f0f76842a72)
---
 setup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 7beb684..cd38ef2 100644
--- a/setup.py
+++ b/setup.py
@@ -506,10 +506,10 @@ devel = [
     'pre-commit',
     'pylint',
     'pysftp',
-    'pytest',
+    'pytest~=6.0',
     'pytest-cov',
     'pytest-instafail',
-    'pytest-rerunfailures',
+    'pytest-rerunfailures~=9.1',
     'pytest-timeouts',
     'pytest-xdist',
     'pywinrm',
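
(Editorial note.) `~=` is pip's "compatible release" specifier: `pytest~=6.0` allows any 6.x release but not 7.0, and `pytest-rerunfailures~=9.1` allows 9.1 and later 9.x releases. A small illustrative check, assuming the `packaging` library is available:

```python
from packaging.specifiers import SpecifierSet

# "~=6.0" is equivalent to ">=6.0, ==6.*": any 6.x release, but not 7.0.
assert SpecifierSet("~=6.0").contains("6.2.2")
assert not SpecifierSet("~=6.0").contains("7.0.0")

# "~=9.1" is equivalent to ">=9.1, ==9.*".
assert SpecifierSet("~=9.1").contains("9.2")
assert not SpecifierSet("~=9.1").contains("10.0")
```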


[airflow] 16/41: Upgrade slack_sdk to v3 (#13745)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 36a7383a30b0f8624e0e4fbfff1971799b8b0367
Author: Jyoti Dhiman <36...@users.noreply.github.com>
AuthorDate: Tue Jan 26 02:43:48 2021 +0530

    Upgrade slack_sdk to v3 (#13745)
    
    Co-authored-by: Kamil Breguła <ka...@polidea.com>
    Co-authored-by: Kamil Breguła <mi...@users.noreply.github.com>
    (cherry picked from commit 283945001363d8f492fbd25f2765d39fa06d757a)
---
 airflow/providers/slack/ADDITIONAL_INFO.md         | 25 ++++++++++++++++++++++
 .../providers/slack/BACKPORT_PROVIDER_README.md    |  2 +-
 airflow/providers/slack/README.md                  |  2 +-
 airflow/providers/slack/hooks/slack.py             |  4 ++--
 docs/conf.py                                       |  2 +-
 docs/spelling_wordlist.txt                         |  1 +
 scripts/ci/libraries/_verify_image.sh              |  2 +-
 setup.py                                           |  2 +-
 tests/providers/slack/hooks/test_slack.py          |  2 +-
 9 files changed, 34 insertions(+), 8 deletions(-)

diff --git a/airflow/providers/slack/ADDITIONAL_INFO.md b/airflow/providers/slack/ADDITIONAL_INFO.md
new file mode 100644
index 0000000..9b05d8a
--- /dev/null
+++ b/airflow/providers/slack/ADDITIONAL_INFO.md
@@ -0,0 +1,25 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+# Migration Guide
+
+## 2.0.0
+
+We replaced the ``slackclient`` dependency (``>=2.0.0,<3.0.0``) with ``slack_sdk`` (``>=3.0.0,<4.0.0``). In most cases this does not require any changes to your DAG files, but if you use the library directly in your own code you will need to update it.
+For details, see [the Migration Guide](https://slack.dev/python-slack-sdk/v3-migration/index.html#from-slackclient-2-x) for the Python Slack SDK.
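
(Illustrative sketch, not part of the diff above.) For code that imported the Slack client directly, the upgrade is mostly a change of import path, since `slack_sdk` 3.x keeps the `WebClient` API:

```python
# Before (slackclient 2.x):
#   from slack import WebClient
# After (slack_sdk 3.x):
from slack_sdk import WebClient

client = WebClient(token="xoxb-...")  # placeholder token
client.chat_postMessage(channel="#random", text="Hello world!")
```
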
diff --git a/airflow/providers/slack/BACKPORT_PROVIDER_README.md b/airflow/providers/slack/BACKPORT_PROVIDER_README.md
index 7863eb4..0e20d06 100644
--- a/airflow/providers/slack/BACKPORT_PROVIDER_README.md
+++ b/airflow/providers/slack/BACKPORT_PROVIDER_README.md
@@ -60,7 +60,7 @@ You can install this package on top of an existing airflow 1.10.* installation v
 
 | PIP package   | Version required   |
 |:--------------|:-------------------|
-| slackclient   | >=2.0.0,<3.0.0     |
+| slack_sdk   | >=3.0.0,<4.0.0     |
 
 ## Cross provider package dependencies
 
diff --git a/airflow/providers/slack/README.md b/airflow/providers/slack/README.md
index 7a630c6..ea4968a 100644
--- a/airflow/providers/slack/README.md
+++ b/airflow/providers/slack/README.md
@@ -61,7 +61,7 @@ You can install this package on top of an existing airflow 2.* installation via
 
 | PIP package   | Version required   |
 |:--------------|:-------------------|
-| slackclient   | >=2.0.0,<3.0.0     |
+| slack_sdk   | >=3.0.0,<4.0.0     |
 
 ## Cross provider package dependencies
 
diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py
index 6f27091..da449a7 100644
--- a/airflow/providers/slack/hooks/slack.py
+++ b/airflow/providers/slack/hooks/slack.py
@@ -18,7 +18,7 @@
 """Hook for Slack"""
 from typing import Any, Optional
 
-from slack import WebClient
+from slack_sdk import WebClient
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
@@ -41,7 +41,7 @@ class SlackHook(BaseHook):  # noqa
         slack_hook.call("chat.postMessage", json={"channel": "#random", "text": "Hello world!"})
 
         # Call method from Slack SDK (you have to handle errors yourself)
-        #  For more details check https://slack.dev/python-slackclient/basic_usage.html#sending-a-message
+        #  For more details check https://slack.dev/python-slack-sdk/web/index.html#messaging
         slack_hook.client.chat_postMessage(channel="#random", text="Hello world!")
 
     :param token: Slack API token
diff --git a/docs/conf.py b/docs/conf.py
index a60bbe3..411796c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -391,7 +391,7 @@ autodoc_mock_imports = [
     'qds_sdk',
     'redis',
     'simple_salesforce',
-    'slackclient',
+    'slack_sdk',
     'smbclient',
     'snowflake',
     'sshtunnel',
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index f8f8f83..71f9e34 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -1204,6 +1204,7 @@ skipable
 sku
 sla
 slackclient
+slack_sdk
 slas
 smtp
 sortable
diff --git a/scripts/ci/libraries/_verify_image.sh b/scripts/ci/libraries/_verify_image.sh
index 2092bd2..05e91c6 100644
--- a/scripts/ci/libraries/_verify_image.sh
+++ b/scripts/ci/libraries/_verify_image.sh
@@ -190,7 +190,7 @@ function verify_image::verify_production_image_python_modules() {
     verify_image::check_command "Import: redis" "python -c 'import redis'"
     verify_image::check_command "Import: sendgrid" "python -c 'import sendgrid'"
     verify_image::check_command "Import: sftp/ssh" "python -c 'import paramiko, pysftp, sshtunnel'"
-    verify_image::check_command "Import: slack" "python -c 'import slack'"
+    verify_image::check_command "Import: slack" "python -c 'import slack_sdk'"
     verify_image::check_command "Import: statsd" "python -c 'import statsd'"
     verify_image::check_command "Import: virtualenv" "python -c 'import virtualenv'"
 
diff --git a/setup.py b/setup.py
index 50f6a2f..0689bd5 100644
--- a/setup.py
+++ b/setup.py
@@ -417,7 +417,7 @@ sentry = [
 ]
 singularity = ['spython>=0.0.56']
 slack = [
-    'slackclient>=2.0.0,<3.0.0',
+    'slack_sdk>=3.0.0,<4.0.0',
 ]
 snowflake = [
     # The `azure` provider uses legacy `azure-storage` library, where `snowflake` uses the
diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py
index cbe3d26..5fef409 100644
--- a/tests/providers/slack/hooks/test_slack.py
+++ b/tests/providers/slack/hooks/test_slack.py
@@ -20,7 +20,7 @@ import unittest
 from unittest import mock
 
 import pytest
-from slack.errors import SlackApiError
+from slack_sdk.errors import SlackApiError
 
 from airflow.exceptions import AirflowException
 from airflow.providers.slack.hooks.slack import SlackHook


[airflow] 14/41: Fix grammar in production-deployment.rst (#14386)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit aa50ae45fd92d0d5894be87591f333be43f39705
Author: Jon Quinn <jo...@gmail.com>
AuthorDate: Tue Feb 23 13:31:38 2021 +0000

    Fix grammar in production-deployment.rst (#14386)
    
    (cherry picked from commit 4fb943c21425f055e555a95ef9e4f7ba4690ee8b)
---
 docs/apache-airflow/production-deployment.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/apache-airflow/production-deployment.rst b/docs/apache-airflow/production-deployment.rst
index 439afe1..042b655 100644
--- a/docs/apache-airflow/production-deployment.rst
+++ b/docs/apache-airflow/production-deployment.rst
@@ -56,9 +56,9 @@ Once that is done, you can run -
 Multi-Node Cluster
 ==================
 
-Airflow uses :class:`~airflow.executors.sequential_executor.SequentialExecutor` by default. However, by it
+Airflow uses :class:`~airflow.executors.sequential_executor.SequentialExecutor` by default. However, by its
 nature, the user is limited to executing at most one task at a time. ``Sequential Executor`` also pauses
-the scheduler when it runs a task, hence not recommended in a production setup. You should use the
+the scheduler when it runs a task, hence it is not recommended in a production setup. You should use the
 :class:`~airflow.executors.local_executor.LocalExecutor` for a single machine.
 For a multi-node setup, you should use the :doc:`Kubernetes executor <../executor/kubernetes>` or
 the :doc:`Celery executor <../executor/celery>`.
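
(Editorial note, not part of the commit above.) The executor is selected through Airflow configuration rather than in DAG code. A minimal sketch of two equivalent ways to switch to the LocalExecutor, assuming a standard deployment:

```python
# In airflow.cfg:
#
#   [core]
#   executor = LocalExecutor
#
# or via the corresponding environment variable, set before Airflow starts:
import os

os.environ["AIRFLOW__CORE__EXECUTOR"] = "LocalExecutor"
```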


[airflow] 15/41: Add Apache Beam operators (#12814)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f394df332bfabf2ddcf08e50facf92b610639c8d
Author: Tobiasz Kędzierski <to...@polidea.com>
AuthorDate: Wed Feb 3 21:34:01 2021 +0100

    Add Apache Beam operators (#12814)
    
    (cherry picked from commit 1872d8719d24f94aeb1dcba9694837070b9884ca)
---
 CONTRIBUTING.rst                                   |  17 +-
 INSTALL                                            |  14 +-
 .../apache/beam/BACKPORT_PROVIDER_README.md        |  99 +++
 airflow/providers/apache/beam/CHANGELOG.rst        |  25 +
 airflow/providers/apache/beam/README.md            |  97 +++
 airflow/providers/apache/beam/__init__.py          |  17 +
 .../providers/apache/beam/example_dags/__init__.py |  17 +
 .../apache/beam/example_dags/example_beam.py       | 315 +++++++++
 airflow/providers/apache/beam/hooks/__init__.py    |  17 +
 airflow/providers/apache/beam/hooks/beam.py        | 289 ++++++++
 .../providers/apache/beam/operators/__init__.py    |  17 +
 airflow/providers/apache/beam/operators/beam.py    | 446 ++++++++++++
 airflow/providers/apache/beam/provider.yaml        |  45 ++
 airflow/providers/dependencies.json                |   4 +
 airflow/providers/google/cloud/hooks/dataflow.py   | 330 ++++-----
 .../providers/google/cloud/operators/dataflow.py   | 331 +++++++--
 .../copy_provider_package_sources.py               |  62 ++
 dev/provider_packages/prepare_provider_packages.py |   4 +-
 .../apache-airflow-providers-apache-beam/index.rst |  36 +
 .../operators.rst                                  | 116 ++++
 docs/apache-airflow/extra-packages-ref.rst         |   2 +
 docs/spelling_wordlist.txt                         |   2 +
 .../run_install_and_test_provider_packages.sh      |   2 +-
 setup.py                                           |   1 +
 tests/core/test_providers_manager.py               |   1 +
 tests/providers/apache/beam/__init__.py            |  16 +
 tests/providers/apache/beam/hooks/__init__.py      |  16 +
 tests/providers/apache/beam/hooks/test_beam.py     | 271 ++++++++
 tests/providers/apache/beam/operators/__init__.py  |  16 +
 tests/providers/apache/beam/operators/test_beam.py | 274 ++++++++
 .../apache/beam/operators/test_beam_system.py      |  47 ++
 .../providers/google/cloud/hooks/test_dataflow.py  | 760 ++++++++++++---------
 .../google/cloud/operators/test_dataflow.py        | 223 ++++--
 .../google/cloud/operators/test_mlengine_utils.py  |  30 +-
 34 files changed, 3263 insertions(+), 696 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 6d0e224..0a6f381 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -572,13 +572,13 @@ This is the full list of those extras:
 
   .. START EXTRAS HERE
 
-all, all_dbs, amazon, apache.atlas, apache.cassandra, apache.druid, apache.hdfs, apache.hive,
-apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop, apache.webhdfs,
-async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes, crypto, dask,
-databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc, docker,
-druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth,
-grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap,
-microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
+all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
+apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop,
+apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
+crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc,
+docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google,
+google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes,
+ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
 opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
 rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
 snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
@@ -641,12 +641,13 @@ Here is the list of packages and their extras:
 Package                    Extras
 ========================== ===========================
 amazon                     apache.hive,google,imap,mongo,mysql,postgres,ssh
+apache.beam                google
 apache.druid               apache.hive
 apache.hive                amazon,microsoft.mssql,mysql,presto,samba,vertica
 apache.livy                http
 dingding                   http
 discord                    http
-google                     amazon,apache.cassandra,cncf.kubernetes,facebook,microsoft.azure,microsoft.mssql,mysql,postgres,presto,salesforce,sftp,ssh
+google                     amazon,apache.beam,apache.cassandra,cncf.kubernetes,facebook,microsoft.azure,microsoft.mssql,mysql,postgres,presto,salesforce,sftp,ssh
 hashicorp                  google
 microsoft.azure            google,oracle
 microsoft.mssql            odbc
diff --git a/INSTALL b/INSTALL
index e1ef456..d175aa1 100644
--- a/INSTALL
+++ b/INSTALL
@@ -97,13 +97,13 @@ The list of available extras:
 
 # START EXTRAS HERE
 
-all, all_dbs, amazon, apache.atlas, apache.cassandra, apache.druid, apache.hdfs, apache.hive,
-apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop, apache.webhdfs,
-async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes, crypto, dask,
-databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc, docker,
-druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth,
-grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap,
-microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
+all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
+apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop,
+apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
+crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc,
+docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google,
+google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes,
+ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, neo4j, odbc, openfaas,
 opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
 rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
 snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
diff --git a/airflow/providers/apache/beam/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/beam/BACKPORT_PROVIDER_README.md
new file mode 100644
index 0000000..d0908b6
--- /dev/null
+++ b/airflow/providers/apache/beam/BACKPORT_PROVIDER_README.md
@@ -0,0 +1,99 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+
+# Package apache-airflow-backport-providers-apache-beam
+
+Release:
+
+**Table of contents**
+
+- [Backport package](#backport-package)
+- [Installation](#installation)
+- [PIP requirements](#pip-requirements)
+- [Cross provider package dependencies](#cross-provider-package-dependencies)
+- [Provider class summary](#provider-classes-summary)
+    - [Operators](#operators)
+        - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfer-operators)
+        - [Moved transfer operators](#moved-transfer-operators)
+    - [Hooks](#hooks)
+        - [Moved hooks](#moved-hooks)
+- [Releases](#releases)
+    - [Release](#release)
+
+## Backport package
+
+This is a backport provider package for the `apache.beam` provider. All classes for this provider package
+are in the `airflow.providers.apache.beam` Python package.
+
+**Only Python 3.6+ is supported for this backport package.**
+
+While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you
+want to use this backport package.
+
+
+## Installation
+
+You can install this package on top of an existing airflow 1.10.* installation via
+`pip install apache-airflow-backport-providers-apache-beam`
+
+## Cross provider package dependencies
+
+These are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified backport provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+```bash
+pip install apache-airflow-backport-providers-apache-beam[google]
+```
+
+| Dependent package                                                                                         | Extra       |
+|:----------------------------------------------------------------------------------------------------------|:------------|
+| [apache-airflow-backport-providers-google](https://pypi.org/project/apache-airflow-backport-providers-google) | google      |
+
+
+# Provider classes summary
+
+In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.beam` provider
+are in the `airflow.providers.apache.beam` package. You can read more about the naming conventions used
+in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
+
+
+## Operators
+
+### New operators
+
+| New Airflow 2.0 operators: `airflow.providers.apache.beam` package                                                                                                                 |
+|:-----------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py)    |
+| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py)  |
+
+
+## Hooks
+
+### New hooks
+
+| New Airflow 2.0 hooks: `airflow.providers.apache.beam` package                                                   |
+|:-----------------------------------------------------------------------------------------------------------------|
+| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/hooks/beam.py) |
+
+
+## Releases
diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/airflow/providers/apache/beam/CHANGELOG.rst
new file mode 100644
index 0000000..cef7dda
--- /dev/null
+++ b/airflow/providers/apache/beam/CHANGELOG.rst
@@ -0,0 +1,25 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Changelog
+---------
+
+1.0.0
+.....
+
+Initial version of the provider.
diff --git a/airflow/providers/apache/beam/README.md b/airflow/providers/apache/beam/README.md
new file mode 100644
index 0000000..3aa0ead
--- /dev/null
+++ b/airflow/providers/apache/beam/README.md
@@ -0,0 +1,97 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+
+# Package apache-airflow-providers-apache-beam
+
+Release: 0.0.1
+
+**Table of contents**
+
+- [Provider package](#provider-package)
+- [Installation](#installation)
+- [PIP requirements](#pip-requirements)
+- [Cross provider package dependencies](#cross-provider-package-dependencies)
+- [Provider class summary](#provider-classes-summary)
+    - [Operators](#operators)
+    - [Transfer operators](#transfer-operators)
+    - [Hooks](#hooks)
+- [Releases](#releases)
+
+## Provider package
+
+This is a provider package for the `apache.beam` provider. All classes for this provider package
+are in the `airflow.providers.apache.beam` Python package.
+
+## Installation
+
+NOTE!
+
+In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
+does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice
+of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
+(`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option
+`--use-deprecated legacy-resolver` to your pip install command.
+
+You can install this package on top of an existing airflow 2.* installation via
+`pip install apache-airflow-providers-apache-beam`
+
+## Cross provider package dependencies
+
+These are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+```bash
+pip install apache-airflow-providers-apache-beam[google]
+```
+
+| Dependent package                                                                           | Extra       |
+|:--------------------------------------------------------------------------------------------|:------------|
+| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google      |
+
+
+# Provider classes summary
+
+In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.beam` provider
+are in the `airflow.providers.apache.beam` package. You can read more about the naming conventions used
+in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages)
+
+
+## Operators
+
+### New operators
+
+| New Airflow 2.0 operators: `airflow.providers.apache.beam` package                                                                                                                 |
+|:-----------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py)    |
+| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py)  |
+
+
+## Hooks
+
+### New hooks
+
+| New Airflow 2.0 hooks: `airflow.providers.apache.beam` package                                                   |
+|:-----------------------------------------------------------------------------------------------------------------|
+| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/hooks/beam.py) |
+
+
+## Releases
diff --git a/airflow/providers/apache/beam/__init__.py b/airflow/providers/apache/beam/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/apache/beam/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/apache/beam/example_dags/__init__.py b/airflow/providers/apache/beam/example_dags/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/apache/beam/example_dags/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/apache/beam/example_dags/example_beam.py b/airflow/providers/apache/beam/example_dags/example_beam.py
new file mode 100644
index 0000000..d20c4ce
--- /dev/null
+++ b/airflow/providers/apache/beam/example_dags/example_beam.py
@@ -0,0 +1,315 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG for Apache Beam operators
+"""
+import os
+from urllib.parse import urlparse
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import (
+    BeamRunJavaPipelineOperator,
+    BeamRunPythonPipelineOperator,
+)
+from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
+from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
+from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
+from airflow.utils.dates import days_ago
+
+GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
+GCS_INPUT = os.environ.get('APACHE_BEAM_PYTHON', 'gs://apache-beam-samples/shakespeare/kinglear.txt')
+GCS_TMP = os.environ.get('APACHE_BEAM_GCS_TMP', 'gs://test-dataflow-example/temp/')
+GCS_STAGING = os.environ.get('APACHE_BEAM_GCS_STAGING', 'gs://test-dataflow-example/staging/')
+GCS_OUTPUT = os.environ.get('APACHE_BEAM_GCS_OUTPUT', 'gs://test-dataflow-example/output')
+GCS_PYTHON = os.environ.get('APACHE_BEAM_PYTHON', 'gs://test-dataflow-example/wordcount_debugging.py')
+GCS_PYTHON_DATAFLOW_ASYNC = os.environ.get(
+    'APACHE_BEAM_PYTHON_DATAFLOW_ASYNC', 'gs://test-dataflow-example/wordcount_debugging.py'
+)
+
+GCS_JAR_DIRECT_RUNNER = os.environ.get(
+    'APACHE_BEAM_DIRECT_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-DirectRunner.jar',
+)
+GCS_JAR_DATAFLOW_RUNNER = os.environ.get(
+    'APACHE_BEAM_DATAFLOW_RUNNER_JAR', 'gs://test-dataflow-example/word-count-beam-bundled-0.1.jar'
+)
+GCS_JAR_SPARK_RUNNER = os.environ.get(
+    'APACHE_BEAM_SPARK_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-SparkRunner.jar',
+)
+GCS_JAR_FLINK_RUNNER = os.environ.get(
+    'APACHE_BEAM_FLINK_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-FlinkRunner.jar',
+)
+
+GCS_JAR_DIRECT_RUNNER_PARTS = urlparse(GCS_JAR_DIRECT_RUNNER)
+GCS_JAR_DIRECT_RUNNER_BUCKET_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.netloc
+GCS_JAR_DIRECT_RUNNER_OBJECT_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.path[1:]
+GCS_JAR_DATAFLOW_RUNNER_PARTS = urlparse(GCS_JAR_DATAFLOW_RUNNER)
+GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.netloc
+GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.path[1:]
+GCS_JAR_SPARK_RUNNER_PARTS = urlparse(GCS_JAR_SPARK_RUNNER)
+GCS_JAR_SPARK_RUNNER_BUCKET_NAME = GCS_JAR_SPARK_RUNNER_PARTS.netloc
+GCS_JAR_SPARK_RUNNER_OBJECT_NAME = GCS_JAR_SPARK_RUNNER_PARTS.path[1:]
+GCS_JAR_FLINK_RUNNER_PARTS = urlparse(GCS_JAR_FLINK_RUNNER)
+GCS_JAR_FLINK_RUNNER_BUCKET_NAME = GCS_JAR_FLINK_RUNNER_PARTS.netloc
+GCS_JAR_FLINK_RUNNER_OBJECT_NAME = GCS_JAR_FLINK_RUNNER_PARTS.path[1:]
+
+
+default_args = {
+    'default_pipeline_options': {
+        'output': '/tmp/example_beam',
+    },
+    "trigger_rule": "all_done",
+}
+
+
+with models.DAG(
+    "example_beam_native_java_direct_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_direct_runner:
+
+    # [START howto_operator_start_java_direct_runner_pipeline]
+    jar_to_local_direct_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_direct_runner",
+        bucket=GCS_JAR_DIRECT_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_DIRECT_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_pipeline_direct_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_pipeline_direct_runner",
+        jar="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar",
+        pipeline_options={
+            'output': '/tmp/start_java_pipeline_direct_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_direct_runner >> start_java_pipeline_direct_runner
+    # [END howto_operator_start_java_direct_runner_pipeline]
+
+with models.DAG(
+    "example_beam_native_java_dataflow_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_dataflow_runner:
+    # [START howto_operator_start_java_dataflow_runner_pipeline]
+    jar_to_local_dataflow_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_dataflow_runner",
+        bucket=GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_pipeline_dataflow = BeamRunJavaPipelineOperator(
+        task_id="start_java_pipeline_dataflow",
+        runner="DataflowRunner",
+        jar="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar",
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+        dataflow_config={"job_name": "{{task.task_id}}", "location": "us-central1"},
+    )
+
+    jar_to_local_dataflow_runner >> start_java_pipeline_dataflow
+    # [END howto_operator_start_java_dataflow_runner_pipeline]
+
+with models.DAG(
+    "example_beam_native_java_spark_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_spark_runner:
+
+    jar_to_local_spark_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_spark_runner",
+        bucket=GCS_JAR_SPARK_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_SPARK_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_pipeline_spark_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_pipeline_spark_runner",
+        runner="SparkRunner",
+        jar="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar",
+        pipeline_options={
+            'output': '/tmp/start_java_pipeline_spark_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_spark_runner >> start_java_pipeline_spark_runner
+
+with models.DAG(
+    "example_beam_native_java_flink_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_flink_runner:
+
+    jar_to_local_flink_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_flink_runner",
+        bucket=GCS_JAR_FLINK_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_FLINK_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_pipeline_flink_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_pipeline_flink_runner",
+        runner="FlinkRunner",
+        jar="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar",
+        pipeline_options={
+            'output': '/tmp/start_java_pipeline_flink_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_flink_runner >> start_java_pipeline_flink_runner
+
+
+with models.DAG(
+    "example_beam_native_python",
+    default_args=default_args,
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
+    tags=['example'],
+) as dag_native_python:
+
+    # [START howto_operator_start_python_direct_runner_pipeline_local_file]
+    start_python_pipeline_local_direct_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline_local_direct_runner",
+        py_file='apache_beam.examples.wordcount',
+        py_options=['-m'],
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+    # [END howto_operator_start_python_direct_runner_pipeline_local_file]
+
+    # [START howto_operator_start_python_direct_runner_pipeline_gcs_file]
+    start_python_pipeline_direct_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline_direct_runner",
+        py_file=GCS_PYTHON,
+        py_options=[],
+        pipeline_options={"output": GCS_OUTPUT},
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+    # [END howto_operator_start_python_direct_runner_pipeline_gcs_file]
+
+    # [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file]
+    start_python_pipeline_dataflow_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline_dataflow_runner",
+        runner="DataflowRunner",
+        py_file=GCS_PYTHON,
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        py_options=[],
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+        dataflow_config=DataflowConfiguration(
+            job_name='{{task.task_id}}', project_id=GCP_PROJECT_ID, location="us-central1"
+        ),
+    )
+    # [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file]
+
+    start_python_pipeline_local_spark_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline_local_spark_runner",
+        py_file='apache_beam.examples.wordcount',
+        runner="SparkRunner",
+        py_options=['-m'],
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_pipeline_local_flink_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline_local_flink_runner",
+        py_file='apache_beam.examples.wordcount',
+        runner="FlinkRunner",
+        py_options=['-m'],
+        pipeline_options={
+            'output': '/tmp/start_python_pipeline_local_flink_runner',
+        },
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    [
+        start_python_pipeline_local_direct_runner,
+        start_python_pipeline_direct_runner,
+    ] >> start_python_pipeline_local_flink_runner >> start_python_pipeline_local_spark_runner
+
+
+with models.DAG(
+    "example_beam_native_python_dataflow_async",
+    default_args=default_args,
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
+    tags=['example'],
+) as dag_native_python_dataflow_async:
+    # [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file]
+    start_python_job_dataflow_runner_async = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_dataflow_runner_async",
+        runner="DataflowRunner",
+        py_file=GCS_PYTHON_DATAFLOW_ASYNC,
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        py_options=[],
+        py_requirements=['apache-beam[gcp]==2.26.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+        dataflow_config=DataflowConfiguration(
+            job_name='{{task.task_id}}',
+            project_id=GCP_PROJECT_ID,
+            location="us-central1",
+            wait_until_finished=False,
+        ),
+    )
+
+    wait_for_python_job_dataflow_runner_async_done = DataflowJobStatusSensor(
+        task_id="wait-for-python-job-async-done",
+        job_id="{{task_instance.xcom_pull('start_python_job_dataflow_runner_async')['dataflow_job_id']}}",
+        expected_statuses={DataflowJobStatus.JOB_STATE_DONE},
+        project_id=GCP_PROJECT_ID,
+        location='us-central1',
+    )
+
+    start_python_job_dataflow_runner_async >> wait_for_python_job_dataflow_runner_async_done
+    # [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file]
diff --git a/airflow/providers/apache/beam/hooks/__init__.py b/airflow/providers/apache/beam/hooks/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/apache/beam/hooks/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/apache/beam/hooks/beam.py b/airflow/providers/apache/beam/hooks/beam.py
new file mode 100644
index 0000000..8e188b0
--- /dev/null
+++ b/airflow/providers/apache/beam/hooks/beam.py
@@ -0,0 +1,289 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains a Apache Beam Hook."""
+import json
+import select
+import shlex
+import subprocess
+import textwrap
+from tempfile import TemporaryDirectory
+from typing import Callable, List, Optional
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base_hook import BaseHook
+from airflow.utils.log.logging_mixin import LoggingMixin
+from airflow.utils.python_virtualenv import prepare_virtualenv
+
+
+class BeamRunnerType:
+    """
+    Helper class for listing runner types.
+    For more information about runners see:
+    https://beam.apache.org/documentation/
+    """
+
+    DataflowRunner = "DataflowRunner"
+    DirectRunner = "DirectRunner"
+    SparkRunner = "SparkRunner"
+    FlinkRunner = "FlinkRunner"
+    SamzaRunner = "SamzaRunner"
+    NemoRunner = "NemoRunner"
+    JetRunner = "JetRunner"
+    Twister2Runner = "Twister2Runner"
+
+
+def beam_options_to_args(options: dict) -> List[str]:
+    """
+    Returns formatted pipeline options built from a dictionary of arguments.
+
+    The logic of this method should be compatible with Apache Beam:
+    https://github.com/apache/beam/blob/b56740f0e8cd80c2873412847d0b336837429fb9/sdks/python/
+    apache_beam/options/pipeline_options.py#L230-L251
+
+    :param options: Dictionary with options
+    :type options: dict
+    :return: List of arguments
+    :rtype: List[str]
+    """
+    if not options:
+        return []
+
+    args: List[str] = []
+    for attr, value in options.items():
+        if value is None or (isinstance(value, bool) and value):
+            args.append(f"--{attr}")
+        elif isinstance(value, list):
+            args.extend([f"--{attr}={v}" for v in value])
+        else:
+            args.append(f"--{attr}={value}")
+    return args
+
+
+class BeamCommandRunner(LoggingMixin):
+    """
+    Class responsible for running pipeline command in subprocess
+
+    :param cmd: Parts of the command to be run in subprocess
+    :type cmd: List[str]
+    :param process_line_callback: Optional callback which can be used to process
+        stdout and stderr to detect job id
+    :type process_line_callback: Optional[Callable[[str], None]]
+    """
+
+    def __init__(
+        self,
+        cmd: List[str],
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        super().__init__()
+        self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
+        self.process_line_callback = process_line_callback
+        self.job_id: Optional[str] = None
+        self._proc = subprocess.Popen(
+            cmd,
+            shell=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+        )
+
+    def _process_fd(self, fd):
+        """
+        Prints output to logs.
+
+        :param fd: File descriptor.
+        """
+        if fd not in (self._proc.stdout, self._proc.stderr):
+            raise Exception("No data in stderr or in stdout.")
+
+        fd_to_log = {self._proc.stderr: self.log.warning, self._proc.stdout: self.log.info}
+        func_log = fd_to_log[fd]
+
+        while True:
+            line = fd.readline().decode()
+            if not line:
+                return
+            if self.process_line_callback:
+                self.process_line_callback(line)
+            func_log(line.rstrip("\n"))
+
+    def wait_for_done(self) -> None:
+        """Waits for Apache Beam pipeline to complete."""
+        self.log.info("Start waiting for Apache Beam process to complete.")
+        reads = [self._proc.stderr, self._proc.stdout]
+        while True:
+            # Wait for at least one available fd.
+            readable_fds, _, _ = select.select(reads, [], [], 5)
+            if readable_fds is None:
+                self.log.info("Waiting for Apache Beam process to complete.")
+                continue
+
+            for readable_fd in readable_fds:
+                self._process_fd(readable_fd)
+
+            if self._proc.poll() is not None:
+                break
+
+        # Corner case: check if more output was created between the last read and the process termination
+        for readable_fd in reads:
+            self._process_fd(readable_fd)
+
+        self.log.info("Process exited with return code: %s", self._proc.returncode)
+
+        if self._proc.returncode != 0:
+            raise AirflowException(f"Apache Beam process failed with return code {self._proc.returncode}")
+
+
+class BeamHook(BaseHook):
+    """
+    Hook for Apache Beam.
+
+    All the methods in the hook where project_id is used must be called with
+    keyword arguments rather than positional.
+
+    :param runner: Runner type
+    :type runner: str
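+
+    A minimal usage sketch (the file path and pipeline options are hypothetical)::
+
+        hook = BeamHook(runner=BeamRunnerType.DirectRunner)
+        hook.start_python_pipeline(
+            variables={"output": "/tmp/beam-output"},
+            py_file="/path/to/my_pipeline.py",
+            py_options=[],
+        )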
+    """
+
+    def __init__(
+        self,
+        runner: str,
+    ) -> None:
+        self.runner = runner
+        super().__init__()
+
+    def _start_pipeline(
+        self,
+        variables: dict,
+        command_prefix: List[str],
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        cmd = command_prefix + [
+            f"--runner={self.runner}",
+        ]
+        if variables:
+            cmd.extend(beam_options_to_args(variables))
+        cmd_runner = BeamCommandRunner(
+            cmd=cmd,
+            process_line_callback=process_line_callback,
+        )
+        cmd_runner.wait_for_done()
+
+    def start_python_pipeline(  # pylint: disable=too-many-arguments
+        self,
+        variables: dict,
+        py_file: str,
+        py_options: List[str],
+        py_interpreter: str = "python3",
+        py_requirements: Optional[List[str]] = None,
+        py_system_site_packages: bool = False,
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ):
+        """
+        Starts an Apache Beam Python pipeline.
+
+        :param variables: Variables passed to the pipeline.
+        :type variables: Dict
+        :param py_file: Path to the Apache Beam pipeline Python file.
+        :type py_file: str
+        :param py_options: Additional options.
+        :type py_options: List[str]
+        :param py_interpreter: Python interpreter used to run the Apache Beam pipeline.
+            If None, this defaults to python3.
+            To track the Python versions supported by Beam and related
+            issues check: https://issues.apache.org/jira/browse/BEAM-1251
+        :type py_interpreter: str
+        :param py_requirements: Additional Python package(s) to install.
+            If a value is passed to this parameter, a new virtual environment will be created with
+            the additional packages installed.
+
+            You can also install the apache-beam package if it is not installed on your system, or if you
+            want to use a different version.
+        :type py_requirements: List[str]
+        :param py_system_site_packages: Whether to include system_site_packages in your virtualenv.
+            See virtualenv documentation for more information.
+
+            This option is only relevant if the ``py_requirements`` parameter is not None.
+        :type py_system_site_packages: bool
+        :param process_line_callback: Optional callback which can be used to process
+            stdout and stderr to detect the job ID.
+        :type process_line_callback: Optional[Callable[[str], None]]
+        """
+        if "labels" in variables:
+            variables["labels"] = [f"{key}={value}" for key, value in variables["labels"].items()]
+
+        if py_requirements is not None:
+            if not py_requirements and not py_system_site_packages:
+                warning_invalid_environment = textwrap.dedent(
+                    """\
+                    Invalid method invocation. You have disabled inclusion of system packages and passed an
+                    empty list of required packages, so it is not possible to create a valid virtual environment.
+                    In the virtual environment, apache-beam package must be installed for your job to be \
+                    executed. To fix this problem:
+                    * install apache-beam on the system, then set parameter py_system_site_packages to True,
+                    * add apache-beam to the list of required packages in parameter py_requirements.
+                    """
+                )
+                raise AirflowException(warning_invalid_environment)
+
+            with TemporaryDirectory(prefix="apache-beam-venv") as tmp_dir:
+                py_interpreter = prepare_virtualenv(
+                    venv_directory=tmp_dir,
+                    python_bin=py_interpreter,
+                    system_site_packages=py_system_site_packages,
+                    requirements=py_requirements,
+                )
+                command_prefix = [py_interpreter] + py_options + [py_file]
+
+                self._start_pipeline(
+                    variables=variables,
+                    command_prefix=command_prefix,
+                    process_line_callback=process_line_callback,
+                )
+        else:
+            command_prefix = [py_interpreter] + py_options + [py_file]
+
+            self._start_pipeline(
+                variables=variables,
+                command_prefix=command_prefix,
+                process_line_callback=process_line_callback,
+            )
+
+    def start_java_pipeline(
+        self,
+        variables: dict,
+        jar: str,
+        job_class: Optional[str] = None,
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        """
+        Starts an Apache Beam Java pipeline.
+
+        :param variables: Variables passed to the job.
+        :type variables: dict
+        :param jar: Name of the jar for the pipeline.
+        :type jar: str
+        :param job_class: Name of the java class for the pipeline.
+        :type job_class: str
+        :param process_line_callback: Optional callback which can be used to process
+            stdout and stderr to detect the job ID.
+        :type process_line_callback: Optional[Callable[[str], None]]
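+
+        A minimal usage sketch (the jar path and options are hypothetical)::
+
+            hook = BeamHook(runner=BeamRunnerType.DirectRunner)
+            hook.start_java_pipeline(
+                variables={"output": "/tmp/beam-output"},
+                jar="/path/to/my-pipeline-bundled.jar",
+            )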
+        """
+        if "labels" in variables:
+            variables["labels"] = json.dumps(variables["labels"], separators=(",", ":"))
+
+        command_prefix = ["java", "-cp", jar, job_class] if job_class else ["java", "-jar", jar]
+        self._start_pipeline(
+            variables=variables,
+            command_prefix=command_prefix,
+            process_line_callback=process_line_callback,
+        )
diff --git a/airflow/providers/apache/beam/operators/__init__.py b/airflow/providers/apache/beam/operators/__init__.py
new file mode 100644
index 0000000..217e5db
--- /dev/null
+++ b/airflow/providers/apache/beam/operators/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py
new file mode 100644
index 0000000..849298e
--- /dev/null
+++ b/airflow/providers/apache/beam/operators/beam.py
@@ -0,0 +1,446 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains Apache Beam operators."""
+from contextlib import ExitStack
+from typing import Callable, List, Optional, Union
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
+from airflow.providers.google.cloud.hooks.dataflow import (
+    DataflowHook,
+    process_line_and_extract_dataflow_job_id_callback,
+)
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.cloud.operators.dataflow import CheckJobRunning, DataflowConfiguration
+from airflow.utils.decorators import apply_defaults
+from airflow.utils.helpers import convert_camel_to_snake
+from airflow.version import version
+
+
+class BeamRunPythonPipelineOperator(BaseOperator):
+    """
+    Launches Apache Beam pipelines written in Python. Note that both
+    ``default_pipeline_options`` and ``pipeline_options`` will be merged to specify pipeline
+    execution parameters, and ``default_pipeline_options`` is expected to hold
+    high-level options, for instance, project and zone information, which
+    apply to all Beam operators in the DAG.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BeamRunPythonPipelineOperator`
+
+    .. seealso::
+        For more detail on Apache Beam have a look at the reference:
+        https://beam.apache.org/documentation/
+
+    :param py_file: Reference to the python Apache Beam pipeline file.py, e.g.,
+        /some/local/file/path/to/your/python/pipeline/file. (templated)
+    :type py_file: str
+    :param runner: Runner on which pipeline will be run. By default "DirectRunner" is being used.
+        Other possible options: DataflowRunner, SparkRunner, FlinkRunner.
+        See: :class:`~providers.apache.beam.hooks.beam.BeamRunnerType`
+        See: https://beam.apache.org/documentation/runners/capability-matrix/
+
+        If you use Dataflow runner check dedicated operator:
+        :class:`~providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator`
+    :type runner: str
+    :param py_options: Additional python options, e.g., ["-m", "-v"].
+    :type py_options: list[str]
+    :param default_pipeline_options: Map of default pipeline options.
+    :type default_pipeline_options: dict
+    :param pipeline_options: Map of pipeline options. The parameter must be a dictionary.
+        The value can be of different types:
+
+        * If the value is None, the single option - ``--key`` (without value) will be added.
+        * If the value is False, this option will be skipped.
+        * If the value is True, the single option - ``--key`` (without value) will be added.
+        * If the value is a list, one option will be added for each element.
+          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B`` options
+          will be added.
+        * Other value types will be replaced with their Python textual representation.
+
+        When defining labels (``labels`` option), you can also provide a dictionary.
+    :type pipeline_options: dict
+    :param py_interpreter: Python interpreter used to run the Beam pipeline.
+        If None, this defaults to python3.
+        To track the Python versions supported by Beam and related
+        issues check: https://issues.apache.org/jira/browse/BEAM-1251
+    :type py_interpreter: str
+    :param py_requirements: Additional Python package(s) to install.
+        If a value is passed to this parameter, a new virtual environment will be created with
+        the additional packages installed.
+
+        You can also install the apache_beam package if it is not installed on your system, or if you
+        want to use a different version.
+    :type py_requirements: List[str]
+    :param py_system_site_packages: Whether to include system_site_packages in your virtualenv.
+        See virtualenv documentation for more information.
+
+        This option is only relevant if the ``py_requirements`` parameter is not None.
+    :param gcp_conn_id: Optional.
+        The connection ID to use when connecting to Google Cloud Storage if the Python file is on GCS.
+    :type gcp_conn_id: str
+    :param delegate_to: Optional.
+        The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param dataflow_config: Dataflow configuration, used when runner type is set to DataflowRunner
+    :type dataflow_config: Union[dict, providers.google.cloud.operators.dataflow.DataflowConfiguration]
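+
+    A minimal usage sketch (bucket, file and option values are hypothetical)::
+
+        start_python_pipeline = BeamRunPythonPipelineOperator(
+            task_id="start_python_pipeline",
+            py_file="gs://my-bucket/my_pipeline.py",
+            runner="DirectRunner",
+            pipeline_options={"output": "gs://my-bucket/output"},
+        )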
+    """
+
+    template_fields = ["py_file", "runner", "pipeline_options", "default_pipeline_options", "dataflow_config"]
+    template_fields_renderers = {'dataflow_config': 'json', 'pipeline_options': 'json'}
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        py_file: str,
+        runner: str = "DirectRunner",
+        default_pipeline_options: Optional[dict] = None,
+        pipeline_options: Optional[dict] = None,
+        py_interpreter: str = "python3",
+        py_options: Optional[List[str]] = None,
+        py_requirements: Optional[List[str]] = None,
+        py_system_site_packages: bool = False,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.py_file = py_file
+        self.runner = runner
+        self.py_options = py_options or []
+        self.default_pipeline_options = default_pipeline_options or {}
+        self.pipeline_options = pipeline_options or {}
+        self.pipeline_options.setdefault("labels", {}).update(
+            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+        )
+        self.py_interpreter = py_interpreter
+        self.py_requirements = py_requirements
+        self.py_system_site_packages = py_system_site_packages
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        self.dataflow_config = dataflow_config or {}
+        self.beam_hook: Optional[BeamHook] = None
+        self.dataflow_hook: Optional[DataflowHook] = None
+        self.dataflow_job_id: Optional[str] = None
+
+        if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
+            self.log.warning(
+                "dataflow_config is defined but runner is different than DataflowRunner (%s)", self.runner
+            )
+
+    def execute(self, context):
+        """Execute the Apache Beam Pipeline."""
+        self.beam_hook = BeamHook(runner=self.runner)
+        pipeline_options = self.default_pipeline_options.copy()
+        process_line_callback: Optional[Callable] = None
+        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
+
+        if isinstance(self.dataflow_config, dict):
+            self.dataflow_config = DataflowConfiguration(**self.dataflow_config)
+
+        if is_dataflow:
+            self.dataflow_hook = DataflowHook(
+                gcp_conn_id=self.dataflow_config.gcp_conn_id or self.gcp_conn_id,
+                delegate_to=self.dataflow_config.delegate_to or self.delegate_to,
+                poll_sleep=self.dataflow_config.poll_sleep,
+                impersonation_chain=self.dataflow_config.impersonation_chain,
+                drain_pipeline=self.dataflow_config.drain_pipeline,
+                cancel_timeout=self.dataflow_config.cancel_timeout,
+                wait_until_finished=self.dataflow_config.wait_until_finished,
+            )
+            self.dataflow_config.project_id = self.dataflow_config.project_id or self.dataflow_hook.project_id
+
+            dataflow_job_name = DataflowHook.build_dataflow_job_name(
+                self.dataflow_config.job_name, self.dataflow_config.append_job_name
+            )
+            pipeline_options["job_name"] = dataflow_job_name
+            pipeline_options["project"] = self.dataflow_config.project_id
+            pipeline_options["region"] = self.dataflow_config.location
+            pipeline_options.setdefault("labels", {}).update(
+                {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+            )
+
+            def set_current_dataflow_job_id(job_id):
+                self.dataflow_job_id = job_id
+
+            process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+                on_new_job_id_callback=set_current_dataflow_job_id
+            )
+
+        pipeline_options.update(self.pipeline_options)
+
+        # Convert argument names from lowerCamelCase to snake case.
+        formatted_pipeline_options = {
+            convert_camel_to_snake(key): pipeline_options[key] for key in pipeline_options
+        }
+
+        with ExitStack() as exit_stack:
+            if self.py_file.lower().startswith("gs://"):
+                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
+                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
+                    gcs_hook.provide_file(object_url=self.py_file)
+                )
+                self.py_file = tmp_gcs_file.name
+
+            self.beam_hook.start_python_pipeline(
+                variables=formatted_pipeline_options,
+                py_file=self.py_file,
+                py_options=self.py_options,
+                py_interpreter=self.py_interpreter,
+                py_requirements=self.py_requirements,
+                py_system_site_packages=self.py_system_site_packages,
+                process_line_callback=process_line_callback,
+            )
+
+            if is_dataflow:
+                self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
+                    job_name=dataflow_job_name,
+                    location=self.dataflow_config.location,
+                    job_id=self.dataflow_job_id,
+                    multiple_jobs=False,
+                )
+
+        return {"dataflow_job_id": self.dataflow_job_id}
+
+    def on_kill(self) -> None:
+        if self.dataflow_hook and self.dataflow_job_id:
+            self.log.info('Dataflow job with id: `%s` was requested to be cancelled.', self.dataflow_job_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.dataflow_job_id,
+                project_id=self.dataflow_config.project_id,
+            )
+
+
+# pylint: disable=too-many-instance-attributes
+class BeamRunJavaPipelineOperator(BaseOperator):
+    """
+    Launches Apache Beam pipelines written in Java.
+
+    Note that both
+    ``default_pipeline_options`` and ``pipeline_options`` will be merged to specify pipeline
+    execution parameters, and ``default_pipeline_options`` is expected to hold
+    high-level pipeline_options, for instance, project and zone information, which
+    apply to all Apache Beam operators in the DAG.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BeamRunJavaPipelineOperator`
+
+    .. seealso::
+        For more detail on Apache Beam have a look at the reference:
+        https://beam.apache.org/documentation/
+
+    You need to pass the path to your jar file as a file reference with the ``jar``
+    parameter; the jar needs to be a self-executing jar (see documentation here:
+    https://beam.apache.org/documentation/runners/dataflow/#self-executing-jar).
+    Use ``pipeline_options`` to pass options to your job.
+
+    :param jar: The reference to a self executing Apache Beam jar (templated).
+    :type jar: str
+    :param runner: Runner on which pipeline will be run. By default "DirectRunner" is being used.
+        See:
+        https://beam.apache.org/documentation/runners/capability-matrix/
+        If you use Dataflow runner check dedicated operator:
+        :class:`~providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator`
+    :type runner: str
+    :param job_class: The name of the Apache Beam pipeline class to be executed; it
+        is often not the main class configured in the pipeline jar file.
+    :type job_class: str
+    :param default_pipeline_options: Map of default job pipeline_options.
+    :type default_pipeline_options: dict
+    :param pipeline_options: Map of job-specific pipeline_options. The parameter must be a dictionary.
+        The value can be of different types:
+
+        * If the value is None, the single option - ``--key`` (without value) will be added.
+        * If the value is False, this option will be skipped.
+        * If the value is True, the single option - ``--key`` (without value) will be added.
+        * If the value is a list, one option will be added for each element.
+          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B`` options
+          will be added.
+        * Other value types will be replaced with their Python textual representation.
+
+        When defining labels (``labels`` option), you can also provide a dictionary.
+    :type pipeline_options: dict
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud Storage if jar is on GCS
+    :type gcp_conn_id: str
+    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param dataflow_config: Dataflow configuration, used when runner type is set to DataflowRunner
+    :type dataflow_config: Union[dict, providers.google.cloud.operators.dataflow.DataflowConfiguration]
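+
+    A minimal usage sketch (bucket, jar and class names are hypothetical)::
+
+        start_java_pipeline = BeamRunJavaPipelineOperator(
+            task_id="start_java_pipeline",
+            jar="gs://my-bucket/my-pipeline-bundled.jar",
+            runner="DirectRunner",
+            job_class="org.example.MyPipeline",
+            pipeline_options={"output": "gs://my-bucket/output"},
+        )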
+    """
+
+    template_fields = [
+        "jar",
+        "runner",
+        "job_class",
+        "pipeline_options",
+        "default_pipeline_options",
+        "dataflow_config",
+    ]
+    template_fields_renderers = {'dataflow_config': 'json', 'pipeline_options': 'json'}
+    ui_color = "#0273d4"
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        jar: str,
+        runner: str = "DirectRunner",
+        job_class: Optional[str] = None,
+        default_pipeline_options: Optional[dict] = None,
+        pipeline_options: Optional[dict] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.jar = jar
+        self.runner = runner
+        self.default_pipeline_options = default_pipeline_options or {}
+        self.pipeline_options = pipeline_options or {}
+        self.job_class = job_class
+        self.dataflow_config = dataflow_config or {}
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        self.dataflow_job_id = None
+        self.dataflow_hook: Optional[DataflowHook] = None
+        self.beam_hook: Optional[BeamHook] = None
+        self._dataflow_job_name: Optional[str] = None
+
+        if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
+            self.log.warning(
+                "dataflow_config is defined but runner is different than DataflowRunner (%s)", self.runner
+            )
+
+    def execute(self, context):
+        """Execute the Apache Beam Pipeline."""
+        self.beam_hook = BeamHook(runner=self.runner)
+        pipeline_options = self.default_pipeline_options.copy()
+        process_line_callback: Optional[Callable] = None
+        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
+
+        if isinstance(self.dataflow_config, dict):
+            self.dataflow_config = DataflowConfiguration(**self.dataflow_config)
+
+        if is_dataflow:
+            self.dataflow_hook = DataflowHook(
+                gcp_conn_id=self.dataflow_config.gcp_conn_id or self.gcp_conn_id,
+                delegate_to=self.dataflow_config.delegate_to or self.delegate_to,
+                poll_sleep=self.dataflow_config.poll_sleep,
+                impersonation_chain=self.dataflow_config.impersonation_chain,
+                drain_pipeline=self.dataflow_config.drain_pipeline,
+                cancel_timeout=self.dataflow_config.cancel_timeout,
+                wait_until_finished=self.dataflow_config.wait_until_finished,
+            )
+            self.dataflow_config.project_id = self.dataflow_config.project_id or self.dataflow_hook.project_id
+
+            self._dataflow_job_name = DataflowHook.build_dataflow_job_name(
+                self.dataflow_config.job_name, self.dataflow_config.append_job_name
+            )
+            pipeline_options["jobName"] = self.dataflow_config.job_name
+            pipeline_options["project"] = self.dataflow_config.project_id
+            pipeline_options["region"] = self.dataflow_config.location
+            pipeline_options.setdefault("labels", {}).update(
+                {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+            )
+
+            def set_current_dataflow_job_id(job_id):
+                self.dataflow_job_id = job_id
+
+            process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+                on_new_job_id_callback=set_current_dataflow_job_id
+            )
+
+        pipeline_options.update(self.pipeline_options)
+
+        with ExitStack() as exit_stack:
+            if self.jar.lower().startswith("gs://"):
+                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
+                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
+                    gcs_hook.provide_file(object_url=self.jar)
+                )
+                self.jar = tmp_gcs_file.name
+
+            if is_dataflow:
+                is_running = False
+                if self.dataflow_config.check_if_running != CheckJobRunning.IgnoreJob:
+                    is_running = (
+                        # The reason for disable=no-value-for-parameter is that project_id parameter is
+                        # required but here is not passed, moreover it cannot be passed here.
+                        # This method is wrapped by @_fallback_to_project_id_from_variables decorator which
+                        # fallback project_id value from variables and raise error if project_id is
+                        # defined both in variables and as parameter (here is already defined in variables)
+                        self.dataflow_hook.is_job_dataflow_running(  # pylint: disable=no-value-for-parameter
+                            name=self.dataflow_config.job_name,
+                            variables=pipeline_options,
+                        )
+                    )
+                    while is_running and self.dataflow_config.check_if_running == CheckJobRunning.WaitForRun:
+                        # The reason for disable=no-value-for-parameter is that project_id parameter is
+                        # required but here is not passed, moreover it cannot be passed here.
+                        # This method is wrapped by @_fallback_to_project_id_from_variables decorator which
+                        # fallback project_id value from variables and raise error if project_id is
+                        # defined both in variables and as parameter (here is already defined in variables)
+                        # pylint: disable=no-value-for-parameter
+                        is_running = self.dataflow_hook.is_job_dataflow_running(
+                            name=self.dataflow_config.job_name,
+                            variables=pipeline_options,
+                        )
+                if not is_running:
+                    pipeline_options["jobName"] = self._dataflow_job_name
+                    self.beam_hook.start_java_pipeline(
+                        variables=pipeline_options,
+                        jar=self.jar,
+                        job_class=self.job_class,
+                        process_line_callback=process_line_callback,
+                    )
+                    self.dataflow_hook.wait_for_done(
+                        job_name=self._dataflow_job_name,
+                        location=self.dataflow_config.location,
+                        job_id=self.dataflow_job_id,
+                        multiple_jobs=self.dataflow_config.multiple_jobs,
+                        project_id=self.dataflow_config.project_id,
+                    )
+
+            else:
+                self.beam_hook.start_java_pipeline(
+                    variables=pipeline_options,
+                    jar=self.jar,
+                    job_class=self.job_class,
+                    process_line_callback=process_line_callback,
+                )
+
+        return {"dataflow_job_id": self.dataflow_job_id}
+
+    def on_kill(self) -> None:
+        if self.dataflow_hook and self.dataflow_job_id:
+            self.log.info('Dataflow job with id: `%s` was requested to be cancelled.', self.dataflow_job_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.dataflow_job_id,
+                project_id=self.dataflow_config.project_id,
+            )
diff --git a/airflow/providers/apache/beam/provider.yaml b/airflow/providers/apache/beam/provider.yaml
new file mode 100644
index 0000000..4325265
--- /dev/null
+++ b/airflow/providers/apache/beam/provider.yaml
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+---
+package-name: apache-airflow-providers-apache-beam
+name: Apache Beam
+description: |
+    `Apache Beam <https://beam.apache.org/>`__.
+
+versions:
+  - 0.0.1
+
+integrations:
+  - integration-name: Apache Beam
+    external-doc-url: https://beam.apache.org/
+    how-to-guide:
+      - /docs/apache-airflow-providers-apache-beam/operators.rst
+    tags: [apache]
+
+operators:
+  - integration-name: Apache Beam
+    python-modules:
+      - airflow.providers.apache.beam.operators.beam
+
+hooks:
+  - integration-name: Apache Beam
+    python-modules:
+      - airflow.providers.apache.beam.hooks.beam
+
+hook-class-names:
+  - airflow.providers.apache.beam.hooks.beam.BeamHook
diff --git a/airflow/providers/dependencies.json b/airflow/providers/dependencies.json
index 748b1a5..836020c 100644
--- a/airflow/providers/dependencies.json
+++ b/airflow/providers/dependencies.json
@@ -8,6 +8,9 @@
     "postgres",
     "ssh"
   ],
+  "apache.beam": [
+    "google"
+  ],
   "apache.druid": [
     "apache.hive"
   ],
@@ -30,6 +33,7 @@
   ],
   "google": [
     "amazon",
+    "apache.beam",
     "apache.cassandra",
     "cncf.kubernetes",
     "facebook",
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py
index da3e49c..f0986e6 100644
--- a/airflow/providers/google/cloud/hooks/dataflow.py
+++ b/airflow/providers/google/cloud/hooks/dataflow.py
@@ -19,23 +19,20 @@
 import functools
 import json
 import re
-import select
 import shlex
 import subprocess
-import textwrap
 import time
 import uuid
 import warnings
 from copy import deepcopy
-from tempfile import TemporaryDirectory
 from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Set, TypeVar, Union, cast
 
 from googleapiclient.discovery import build
 
 from airflow.exceptions import AirflowException
+from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType, beam_options_to_args
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.python_virtualenv import prepare_virtualenv
 from airflow.utils.timeout import timeout
 
 # This is the default location
@@ -50,6 +47,35 @@ JOB_ID_PATTERN = re.compile(
 T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
 
 
+def process_line_and_extract_dataflow_job_id_callback(
+    on_new_job_id_callback: Optional[Callable[[str], None]]
+) -> Callable[[str], None]:
+    """
+    Returns a callback which triggers the function passed as `on_new_job_id_callback` when a Dataflow
+    job_id is found. To be used as `process_line_callback` in
+    :py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner`
+
+    :param on_new_job_id_callback: Callback called when the job ID is known.
+    :type on_new_job_id_callback: callable
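+
+    A minimal usage sketch (the callable used here is purely illustrative)::
+
+        callback = process_line_and_extract_dataflow_job_id_callback(
+            on_new_job_id_callback=lambda job_id: print(job_id)
+        )
+        # Pass ``callback`` as ``process_line_callback`` to BeamCommandRunner or a BeamHook method.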
+    """
+
+    def _process_line_and_extract_job_id(
+        line: str,
+    ) -> None:
+        # Job id info: https://goo.gl/SE29y9.
+        matched_job = JOB_ID_PATTERN.search(line)
+        if matched_job:
+            job_id = matched_job.group("job_id_java") or matched_job.group("job_id_python")
+            if on_new_job_id_callback:
+                on_new_job_id_callback(job_id)
+
+    def wrap(line: str):
+        return _process_line_and_extract_job_id(line)
+
+    return wrap
+
+
 def _fallback_variable_parameter(parameter_name: str, variable_key_name: str) -> Callable[[T], T]:
     def _wrapper(func: T) -> T:
         """
@@ -482,98 +508,6 @@ class _DataflowJobsController(LoggingMixin):
             self.log.info("No jobs to cancel")
 
 
-class _DataflowRunner(LoggingMixin):
-    def __init__(
-        self,
-        cmd: List[str],
-        on_new_job_id_callback: Optional[Callable[[str], None]] = None,
-    ) -> None:
-        super().__init__()
-        self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
-        self.on_new_job_id_callback = on_new_job_id_callback
-        self.job_id: Optional[str] = None
-        self._proc = subprocess.Popen(
-            cmd,
-            shell=False,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            close_fds=True,
-        )
-
-    def _process_fd(self, fd):
-        """
-        Prints output to logs and lookup for job ID in each line.
-
-        :param fd: File descriptor.
-        """
-        if fd == self._proc.stderr:
-            while True:
-                line = self._proc.stderr.readline().decode()
-                if not line:
-                    return
-                self._process_line_and_extract_job_id(line)
-                self.log.warning(line.rstrip("\n"))
-
-        if fd == self._proc.stdout:
-            while True:
-                line = self._proc.stdout.readline().decode()
-                if not line:
-                    return
-                self._process_line_and_extract_job_id(line)
-                self.log.info(line.rstrip("\n"))
-
-        raise Exception("No data in stderr or in stdout.")
-
-    def _process_line_and_extract_job_id(self, line: str) -> None:
-        """
-        Extracts job_id.
-
-        :param line: URL from which job_id has to be extracted
-        :type line: str
-        """
-        # Job id info: https://goo.gl/SE29y9.
-        matched_job = JOB_ID_PATTERN.search(line)
-        if matched_job:
-            job_id = matched_job.group("job_id_java") or matched_job.group("job_id_python")
-            self.log.info("Found Job ID: %s", job_id)
-            self.job_id = job_id
-            if self.on_new_job_id_callback:
-                self.on_new_job_id_callback(job_id)
-
-    def wait_for_done(self) -> Optional[str]:
-        """
-        Waits for Dataflow job to complete.
-
-        :return: Job id
-        :rtype: Optional[str]
-        """
-        self.log.info("Start waiting for DataFlow process to complete.")
-        self.job_id = None
-        reads = [self._proc.stderr, self._proc.stdout]
-        while True:
-            # Wait for at least one available fd.
-            readable_fds, _, _ = select.select(reads, [], [], 5)
-            if readable_fds is None:
-                self.log.info("Waiting for DataFlow process to complete.")
-                continue
-
-            for readable_fd in readable_fds:
-                self._process_fd(readable_fd)
-
-            if self._proc.poll() is not None:
-                break
-
-        # Corner case: check if more output was created between the last read and the process termination
-        for readable_fd in reads:
-            self._process_fd(readable_fd)
-
-        self.log.info("Process exited with return code: %s", self._proc.returncode)
-
-        if self._proc.returncode != 0:
-            raise Exception(f"DataFlow failed with return code {self._proc.returncode}")
-        return self.job_id
-
-
 class DataflowHook(GoogleBaseHook):
     """
     Hook for Google Dataflow.
@@ -596,6 +530,8 @@ class DataflowHook(GoogleBaseHook):
         self.drain_pipeline = drain_pipeline
         self.cancel_timeout = cancel_timeout
         self.wait_until_finished = wait_until_finished
+        self.job_id: Optional[str] = None
+        self.beam_hook = BeamHook(BeamRunnerType.DataflowRunner)
         super().__init__(
             gcp_conn_id=gcp_conn_id,
             delegate_to=delegate_to,
@@ -607,40 +543,6 @@ class DataflowHook(GoogleBaseHook):
         http_authorized = self._authorize()
         return build("dataflow", "v1b3", http=http_authorized, cache_discovery=False)
 
-    @GoogleBaseHook.provide_gcp_credential_file
-    def _start_dataflow(
-        self,
-        variables: dict,
-        name: str,
-        command_prefix: List[str],
-        project_id: str,
-        multiple_jobs: bool = False,
-        on_new_job_id_callback: Optional[Callable[[str], None]] = None,
-        location: str = DEFAULT_DATAFLOW_LOCATION,
-    ) -> None:
-        cmd = command_prefix + [
-            "--runner=DataflowRunner",
-            f"--project={project_id}",
-        ]
-        if variables:
-            cmd.extend(self._options_to_args(variables))
-        runner = _DataflowRunner(cmd=cmd, on_new_job_id_callback=on_new_job_id_callback)
-        job_id = runner.wait_for_done()
-        job_controller = _DataflowJobsController(
-            dataflow=self.get_conn(),
-            project_number=project_id,
-            name=name,
-            location=location,
-            poll_sleep=self.poll_sleep,
-            job_id=job_id,
-            num_retries=self.num_retries,
-            multiple_jobs=multiple_jobs,
-            drain_pipeline=self.drain_pipeline,
-            cancel_timeout=self.cancel_timeout,
-            wait_until_finished=self.wait_until_finished,
-        )
-        job_controller.wait_for_done()
-
     @_fallback_to_location_from_variables
     @_fallback_to_project_id_from_variables
     @GoogleBaseHook.fallback_to_default_project_id
@@ -678,22 +580,36 @@ class DataflowHook(GoogleBaseHook):
         :param location: Job location.
         :type location: str
         """
-        name = self._build_dataflow_job_name(job_name, append_job_name)
+        warnings.warn(
+            """"This method is deprecated.
+            Please use `airflow.providers.apache.beam.hooks.beam.start.start_java_pipeline`
+            to start pipeline and `providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done`
+            to wait for the required pipeline state.
+            """,
+            DeprecationWarning,
+            stacklevel=3,
+        )
+
+        name = self.build_dataflow_job_name(job_name, append_job_name)
+
         variables["jobName"] = name
         variables["region"] = location
+        variables["project"] = project_id
 
         if "labels" in variables:
             variables["labels"] = json.dumps(variables["labels"], separators=(",", ":"))
 
-        command_prefix = ["java", "-cp", jar, job_class] if job_class else ["java", "-jar", jar]
-        self._start_dataflow(
+        self.beam_hook.start_java_pipeline(
             variables=variables,
-            name=name,
-            command_prefix=command_prefix,
-            project_id=project_id,
-            multiple_jobs=multiple_jobs,
-            on_new_job_id_callback=on_new_job_id_callback,
+            jar=jar,
+            job_class=job_class,
+            process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback),
+        )
+        self.wait_for_done(  # pylint: disable=no-value-for-parameter
+            job_name=name,
             location=location,
+            job_id=self.job_id,
+            multiple_jobs=multiple_jobs,
         )
 
     @_fallback_to_location_from_variables
@@ -746,7 +662,7 @@ class DataflowHook(GoogleBaseHook):
 
         :type environment: Optional[dict]
         """
-        name = self._build_dataflow_job_name(job_name, append_job_name)
+        name = self.build_dataflow_job_name(job_name, append_job_name)
 
         environment = environment or {}
         # available keys for runtime environment are listed here:
@@ -919,58 +835,40 @@ class DataflowHook(GoogleBaseHook):
         :param location: Job location.
         :type location: str
         """
-        name = self._build_dataflow_job_name(job_name, append_job_name)
+        warnings.warn(
+            """This method is deprecated.
+            Please use `airflow.providers.apache.beam.hooks.beam.BeamHook.start_python_pipeline`
+            to start the pipeline and `providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done`
+            to wait for the required pipeline state.
+            """,
+            DeprecationWarning,
+            stacklevel=3,
+        )
+
+        name = self.build_dataflow_job_name(job_name, append_job_name)
         variables["job_name"] = name
         variables["region"] = location
+        variables["project"] = project_id
 
-        if "labels" in variables:
-            variables["labels"] = [f"{key}={value}" for key, value in variables["labels"].items()]
-
-        if py_requirements is not None:
-            if not py_requirements and not py_system_site_packages:
-                warning_invalid_environment = textwrap.dedent(
-                    """\
-                    Invalid method invocation. You have disabled inclusion of system packages and empty list
-                    required for installation, so it is not possible to create a valid virtual environment.
-                    In the virtual environment, apache-beam package must be installed for your job to be \
-                    executed. To fix this problem:
-                    * install apache-beam on the system, then set parameter py_system_site_packages to True,
-                    * add apache-beam to the list of required packages in parameter py_requirements.
-                    """
-                )
-                raise AirflowException(warning_invalid_environment)
-
-            with TemporaryDirectory(prefix="dataflow-venv") as tmp_dir:
-                py_interpreter = prepare_virtualenv(
-                    venv_directory=tmp_dir,
-                    python_bin=py_interpreter,
-                    system_site_packages=py_system_site_packages,
-                    requirements=py_requirements,
-                )
-                command_prefix = [py_interpreter] + py_options + [dataflow]
-
-                self._start_dataflow(
-                    variables=variables,
-                    name=name,
-                    command_prefix=command_prefix,
-                    project_id=project_id,
-                    on_new_job_id_callback=on_new_job_id_callback,
-                    location=location,
-                )
-        else:
-            command_prefix = [py_interpreter] + py_options + [dataflow]
-
-            self._start_dataflow(
-                variables=variables,
-                name=name,
-                command_prefix=command_prefix,
-                project_id=project_id,
-                on_new_job_id_callback=on_new_job_id_callback,
-                location=location,
-            )
+        self.beam_hook.start_python_pipeline(
+            variables=variables,
+            py_file=dataflow,
+            py_options=py_options,
+            py_interpreter=py_interpreter,
+            py_requirements=py_requirements,
+            py_system_site_packages=py_system_site_packages,
+            process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback),
+        )
+
+        self.wait_for_done(  # pylint: disable=no-value-for-parameter
+            job_name=name,
+            location=location,
+            job_id=self.job_id,
+        )
 
     @staticmethod
-    def _build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str:
+    def build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str:
+        """Builds Dataflow job name."""
         base_job_name = str(job_name).replace("_", "-")
 
         if not re.match(r"^[a-z]([-a-z0-9]*[a-z0-9])?$", base_job_name):
@@ -987,23 +885,6 @@ class DataflowHook(GoogleBaseHook):
 
         return safe_job_name
 
-    @staticmethod
-    def _options_to_args(variables: dict) -> List[str]:
-        if not variables:
-            return []
-        # The logic of this method should be compatible with Apache Beam:
-        # https://github.com/apache/beam/blob/b56740f0e8cd80c2873412847d0b336837429fb9/sdks/python/
-        # apache_beam/options/pipeline_options.py#L230-L251
-        args: List[str] = []
-        for attr, value in variables.items():
-            if value is None or (isinstance(value, bool) and value):
-                args.append(f"--{attr}")
-            elif isinstance(value, list):
-                args.extend([f"--{attr}={v}" for v in value])
-            else:
-                args.append(f"--{attr}={value}")
-        return args
-
     @_fallback_to_location_from_variables
     @_fallback_to_project_id_from_variables
     @GoogleBaseHook.fallback_to_default_project_id
@@ -1123,7 +1004,7 @@ class DataflowHook(GoogleBaseHook):
             "--format=value(job.id)",
             f"--job-name={job_name}",
             f"--region={location}",
-            *(self._options_to_args(options)),
+            *(beam_options_to_args(options)),
         ]
         self.log.info("Executing command: %s", " ".join([shlex.quote(c) for c in cmd]))
         with self.provide_authorized_gcloud():
@@ -1264,3 +1145,44 @@ class DataflowHook(GoogleBaseHook):
             location=location,
         )
         return jobs_controller.fetch_job_autoscaling_events_by_id(job_id)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def wait_for_done(
+        self,
+        job_name: str,
+        location: str,
+        project_id: str,
+        job_id: Optional[str] = None,
+        multiple_jobs: bool = False,
+    ) -> None:
+        """
+        Wait for Dataflow job.
+
+        :param job_name: The 'jobName' to use when executing the Dataflow job
+            (templated). This ends up being set in the pipeline options, so any entry
+            with key ``'jobName'`` in ``options`` will be overwritten.
+        :type job_name: str
+        :param location: The location where the job is running.
+        :type location: str
+        :param project_id: Optional, the Google Cloud project ID in which to start a job.
+            If set to None or missing, the default project_id from the Google Cloud connection is used.
+        :type project_id: str
+        :param job_id: A Dataflow job ID.
+        :type job_id: str
+        :param multiple_jobs: If the pipeline creates multiple jobs then monitor all of them.
+        :type multiple_jobs: bool
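+
+        A minimal usage sketch (connection ID, project, location and job name are hypothetical)::
+
+            hook = DataflowHook(gcp_conn_id="google_cloud_default")
+            hook.wait_for_done(
+                job_name="my-beam-job",
+                location="us-central1",
+                project_id="my-gcp-project",
+            )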
+        """
+        job_controller = _DataflowJobsController(
+            dataflow=self.get_conn(),
+            project_number=project_id,
+            name=job_name,
+            location=location,
+            poll_sleep=self.poll_sleep,
+            job_id=job_id or self.job_id,
+            num_retries=self.num_retries,
+            multiple_jobs=multiple_jobs,
+            drain_pipeline=self.drain_pipeline,
+            cancel_timeout=self.cancel_timeout,
+            wait_until_finished=self.wait_until_finished,
+        )
+        job_controller.wait_for_done()
diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py
index 49863dc..f977704 100644
--- a/airflow/providers/google/cloud/operators/dataflow.py
+++ b/airflow/providers/google/cloud/operators/dataflow.py
@@ -16,15 +16,20 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains Google Dataflow operators."""
-
 import copy
 import re
+import warnings
 from contextlib import ExitStack
 from enum import Enum
 from typing import Any, Dict, List, Optional, Sequence, Union
 
 from airflow.models import BaseOperator
-from airflow.providers.google.cloud.hooks.dataflow import DEFAULT_DATAFLOW_LOCATION, DataflowHook
+from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
+from airflow.providers.google.cloud.hooks.dataflow import (
+    DEFAULT_DATAFLOW_LOCATION,
+    DataflowHook,
+    process_line_and_extract_dataflow_job_id_callback,
+)
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.utils.decorators import apply_defaults
 from airflow.version import version
@@ -43,12 +48,137 @@ class CheckJobRunning(Enum):
     WaitForRun = 3
 
 
+class DataflowConfiguration:
+    """Dataflow configuration that can be passed to
+    :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` and
+    :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`.
+
+    :param job_name: The 'jobName' to use when executing the DataFlow job
+        (templated). This ends up being set in the pipeline options, so any entry
+        with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten.
+    :type job_name: str
+    :param append_job_name: True if unique suffix has to be appended to job name.
+    :type append_job_name: bool
+    :param project_id: Optional, the Google Cloud project ID in which to start a job.
+        If set to None or missing, the default project_id from the Google Cloud connection is used.
+    :type project_id: str
+    :param location: Job location.
+    :type location: str
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :type gcp_conn_id: str
+    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param poll_sleep: The time in seconds to sleep between polling Google
+        Cloud Platform for the dataflow job status while the job is in the
+        JOB_STATE_RUNNING state.
+    :type poll_sleep: int
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :type impersonation_chain: Union[str, Sequence[str]]
+    :param drain_pipeline: Optional, set to True if you want to stop a streaming job by draining it
+        instead of canceling it when the task instance is killed. See:
+        https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline
+    :type drain_pipeline: bool
+    :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be
+        successfully cancelled when task is being killed.
+    :type cancel_timeout: Optional[int]
+    :param wait_until_finished: (Optional)
+        If True, wait for the end of pipeline execution before exiting.
+        If False, only submits job.
+        If None, default behavior.
+
+        The default behavior depends on the type of pipeline:
+
+        * for the streaming pipeline, wait for jobs to start,
+        * for the batch pipeline, wait for the jobs to complete.
+
+        .. warning::
+
+            You cannot call the ``PipelineResult.wait_until_finish`` method in your pipeline code for the
+            operator to work properly, i.e. you must use asynchronous execution. Otherwise, your pipeline will
+            always wait until finished. For more information, look at:
+            `Asynchronous execution
+            <https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#python_10>`__
+
+        The process of starting the Dataflow job in Airflow consists of two steps:
+
+        * running a subprocess and reading the stdout/stderr logs for the job ID,
+        * a loop that waits for the job with that ID to reach a terminal state.
+          This loop checks the status of the job.
+
+        Step two starts just after step one has finished, so if you call ``wait_until_finish`` in your
+        pipeline code, step two will not start until the process stops. When this process stops,
+        step two will run, but it will only execute one iteration as the job will already be in a terminal
+        state.
+
+        If you do not call the ``wait_until_finish`` method in your pipeline but pass
+        ``wait_until_finished=True`` to the operator, the second loop will wait for the job's terminal state.
+
+        If you do not call the ``wait_until_finish`` method in your pipeline and pass
+        ``wait_until_finished=False`` to the operator, the second loop will check once whether the job is in
+        a terminal state and exit the loop.
+    :type wait_until_finished: Optional[bool]
+    :param multiple_jobs: If the pipeline creates multiple jobs then monitor all of them. Supported only by
+        :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`
+    :type multiple_jobs: bool
+    :param check_if_running: Before running the job, validate that a previous run is not in progress.
+        IgnoreJob = do not check if running.
+        FinishIfRunning = if the job is running, finish without doing anything.
+        WaitForRun = wait until the job finishes and then run the new job.
+        Supported only by:
+        :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`
+    :type check_if_running: CheckJobRunning
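+
+    A minimal usage sketch (project, location and job name values are hypothetical)::
+
+        dataflow_config = DataflowConfiguration(
+            job_name="my-beam-job",
+            project_id="my-gcp-project",
+            location="us-central1",
+        )
+        # Pass ``dataflow_config`` to BeamRunPythonPipelineOperator or BeamRunJavaPipelineOperator
+        # together with ``runner="DataflowRunner"``.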
+    """
+
+    template_fields = ["job_name", "location"]
+
+    def __init__(
+        self,
+        *,
+        job_name: Optional[str] = "{{task.task_id}}",
+        append_job_name: bool = True,
+        project_id: Optional[str] = None,
+        location: Optional[str] = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        poll_sleep: int = 10,
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        drain_pipeline: bool = False,
+        cancel_timeout: Optional[int] = 5 * 60,
+        wait_until_finished: Optional[bool] = None,
+        multiple_jobs: Optional[bool] = None,
+        check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun,
+    ) -> None:
+        self.job_name = job_name
+        self.append_job_name = append_job_name
+        self.project_id = project_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+        self.drain_pipeline = drain_pipeline
+        self.cancel_timeout = cancel_timeout
+        self.wait_until_finished = wait_until_finished
+        self.multiple_jobs = multiple_jobs
+        self.check_if_running = check_if_running
+
+
 # pylint: disable=too-many-instance-attributes
 class DataflowCreateJavaJobOperator(BaseOperator):
     """
     Start a Java Cloud DataFlow batch job. The parameters of the operation
     will be passed to the job.
 
+    This class is deprecated.
+    Please use `providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`.
+
     **Example**: ::
 
         default_args = {
@@ -235,6 +365,14 @@ class DataflowCreateJavaJobOperator(BaseOperator):
         wait_until_finished: Optional[bool] = None,
         **kwargs,
     ) -> None:
+        # TODO: Remove one day
+        warnings.warn(
+            "The `{cls}` operator is deprecated, please use "
+            "`providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` instead."
+            "".format(cls=self.__class__.__name__),
+            DeprecationWarning,
+            stacklevel=2,
+        )
         super().__init__(**kwargs)
 
         dataflow_default_options = dataflow_default_options or {}
@@ -257,62 +395,83 @@ class DataflowCreateJavaJobOperator(BaseOperator):
         self.cancel_timeout = cancel_timeout
         self.wait_until_finished = wait_until_finished
         self.job_id = None
-        self.hook = None
+        self.beam_hook: Optional[BeamHook] = None
+        self.dataflow_hook: Optional[DataflowHook] = None
 
     def execute(self, context):
-        self.hook = DataflowHook(
+        """Execute the Apache Beam Pipeline."""
+        self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner)
+        self.dataflow_hook = DataflowHook(
             gcp_conn_id=self.gcp_conn_id,
             delegate_to=self.delegate_to,
             poll_sleep=self.poll_sleep,
             cancel_timeout=self.cancel_timeout,
             wait_until_finished=self.wait_until_finished,
         )
-        dataflow_options = copy.copy(self.dataflow_default_options)
-        dataflow_options.update(self.options)
-        is_running = False
-        if self.check_if_running != CheckJobRunning.IgnoreJob:
-            is_running = self.hook.is_job_dataflow_running(  # type: ignore[attr-defined]
-                name=self.job_name,
-                variables=dataflow_options,
-                project_id=self.project_id,
-                location=self.location,
-            )
-            while is_running and self.check_if_running == CheckJobRunning.WaitForRun:
-                is_running = self.hook.is_job_dataflow_running(  # type: ignore[attr-defined]
-                    name=self.job_name,
-                    variables=dataflow_options,
-                    project_id=self.project_id,
-                    location=self.location,
-                )
+        job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name)
+        pipeline_options = copy.deepcopy(self.dataflow_default_options)
+
+        pipeline_options["jobName"] = self.job_name
+        pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id
+        pipeline_options["region"] = self.location
+        pipeline_options.update(self.options)
+        pipeline_options.setdefault("labels", {}).update(
+            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+        )
+        pipeline_options.update(self.options)
 
-        if not is_running:
-            with ExitStack() as exit_stack:
-                if self.jar.lower().startswith("gs://"):
-                    gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
-                    tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
-                        gcs_hook.provide_file(object_url=self.jar)
-                    )
-                    self.jar = tmp_gcs_file.name
-
-                def set_current_job_id(job_id):
-                    self.job_id = job_id
-
-                self.hook.start_java_dataflow(  # type: ignore[attr-defined]
-                    job_name=self.job_name,
-                    variables=dataflow_options,
-                    jar=self.jar,
-                    job_class=self.job_class,
-                    append_job_name=True,
-                    multiple_jobs=self.multiple_jobs,
-                    on_new_job_id_callback=set_current_job_id,
-                    project_id=self.project_id,
-                    location=self.location,
+        def set_current_job_id(job_id):
+            self.job_id = job_id
+
+        process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+            on_new_job_id_callback=set_current_job_id
+        )
+
+        with ExitStack() as exit_stack:
+            if self.jar.lower().startswith("gs://"):
+                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
+                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
+                    gcs_hook.provide_file(object_url=self.jar)
                 )
+                self.jar = tmp_gcs_file.name
+
+                is_running = False
+                if self.check_if_running != CheckJobRunning.IgnoreJob:
+                    is_running = (
+                        self.dataflow_hook.is_job_dataflow_running(  # pylint: disable=no-value-for-parameter
+                            name=self.job_name,
+                            variables=pipeline_options,
+                        )
+                    )
+                    while is_running and self.check_if_running == CheckJobRunning.WaitForRun:
+                        # pylint: disable=no-value-for-parameter
+                        is_running = self.dataflow_hook.is_job_dataflow_running(
+                            name=self.job_name,
+                            variables=pipeline_options,
+                        )
+                if not is_running:
+                    pipeline_options["jobName"] = job_name
+                    self.beam_hook.start_java_pipeline(
+                        variables=pipeline_options,
+                        jar=self.jar,
+                        job_class=self.job_class,
+                        process_line_callback=process_line_callback,
+                    )
+                    self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
+                        job_name=job_name,
+                        location=self.location,
+                        job_id=self.job_id,
+                        multiple_jobs=self.multiple_jobs,
+                    )
+
+        return {"job_id": self.job_id}
 
     def on_kill(self) -> None:
         self.log.info("On kill.")
         if self.job_id:
-            self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id
+            )
 
 
 # pylint: disable=too-many-instance-attributes
@@ -760,6 +919,9 @@ class DataflowCreatePythonJobOperator(BaseOperator):
     high-level options, for instances, project and zone information, which
     apply to all dataflow operators in the DAG.
 
+    This class is deprecated.
+    Please use `providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`.
+
     .. seealso::
         For more detail on job submission have a look at the reference:
         https://cloud.google.com/dataflow/pipelines/specifying-exec-params
@@ -886,7 +1048,14 @@ class DataflowCreatePythonJobOperator(BaseOperator):
         wait_until_finished: Optional[bool] = None,
         **kwargs,
     ) -> None:
-
+        # TODO: Remove one day
+        warnings.warn(
+            "The `{cls}` operator is deprecated, please use "
+            "`providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator` instead."
+            "".format(cls=self.__class__.__name__),
+            DeprecationWarning,
+            stacklevel=2,
+        )
         super().__init__(**kwargs)
 
         self.py_file = py_file
@@ -909,10 +1078,40 @@ class DataflowCreatePythonJobOperator(BaseOperator):
         self.cancel_timeout = cancel_timeout
         self.wait_until_finished = wait_until_finished
         self.job_id = None
-        self.hook: Optional[DataflowHook] = None
+        self.beam_hook: Optional[BeamHook] = None
+        self.dataflow_hook: Optional[DataflowHook] = None
 
     def execute(self, context):
         """Execute the python dataflow job."""
+        self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner)
+        self.dataflow_hook = DataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            delegate_to=self.delegate_to,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=None,
+            drain_pipeline=self.drain_pipeline,
+            cancel_timeout=self.cancel_timeout,
+            wait_until_finished=self.wait_until_finished,
+        )
+
+        job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name)
+        pipeline_options = self.dataflow_default_options.copy()
+        pipeline_options["job_name"] = job_name
+        pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id
+        pipeline_options["region"] = self.location
+        pipeline_options.update(self.options)
+
+        # Convert argument names from lowerCamelCase to snake case.
+        camel_to_snake = lambda name: re.sub(r"[A-Z]", lambda x: "_" + x.group(0).lower(), name)
+        formatted_pipeline_options = {camel_to_snake(key): pipeline_options[key] for key in pipeline_options}
+
+        def set_current_job_id(job_id):
+            self.job_id = job_id
+
+        process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+            on_new_job_id_callback=set_current_job_id
+        )
+
         with ExitStack() as exit_stack:
             if self.py_file.lower().startswith("gs://"):
                 gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
@@ -921,38 +1120,28 @@ class DataflowCreatePythonJobOperator(BaseOperator):
                 )
                 self.py_file = tmp_gcs_file.name
 
-            self.hook = DataflowHook(
-                gcp_conn_id=self.gcp_conn_id,
-                delegate_to=self.delegate_to,
-                poll_sleep=self.poll_sleep,
-                drain_pipeline=self.drain_pipeline,
-                cancel_timeout=self.cancel_timeout,
-                wait_until_finished=self.wait_until_finished,
-            )
-            dataflow_options = self.dataflow_default_options.copy()
-            dataflow_options.update(self.options)
-            # Convert argument names from lowerCamelCase to snake case.
-            camel_to_snake = lambda name: re.sub(r"[A-Z]", lambda x: "_" + x.group(0).lower(), name)
-            formatted_options = {camel_to_snake(key): dataflow_options[key] for key in dataflow_options}
-
-            def set_current_job_id(job_id):
-                self.job_id = job_id
-
-            self.hook.start_python_dataflow(  # type: ignore[attr-defined]
-                job_name=self.job_name,
-                variables=formatted_options,
-                dataflow=self.py_file,
+            self.beam_hook.start_python_pipeline(
+                variables=formatted_pipeline_options,
+                py_file=self.py_file,
                 py_options=self.py_options,
                 py_interpreter=self.py_interpreter,
                 py_requirements=self.py_requirements,
                 py_system_site_packages=self.py_system_site_packages,
-                on_new_job_id_callback=set_current_job_id,
-                project_id=self.project_id,
+                process_line_callback=process_line_callback,
+            )
+
+            self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
+                job_name=job_name,
                 location=self.location,
+                job_id=self.job_id,
+                multiple_jobs=False,
             )
-            return {"job_id": self.job_id}
+
+        return {"job_id": self.job_id}
 
     def on_kill(self) -> None:
         self.log.info("On kill.")
         if self.job_id:
-            self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id
+            )
diff --git a/dev/provider_packages/copy_provider_package_sources.py b/dev/provider_packages/copy_provider_package_sources.py
index 1d10747..c7f75f5 100755
--- a/dev/provider_packages/copy_provider_package_sources.py
+++ b/dev/provider_packages/copy_provider_package_sources.py
@@ -703,6 +703,67 @@ class RefactorBackportPackages:
             .rename("airflow.models.baseoperator")
         )
 
+    def refactor_apache_beam_package(self):
+        r"""
+        Fixes to "apache_beam" providers package.
+
+        Copies some of the utility modules used from core Airflow to the "common.utils" package of
+        the provider and renames imports to use them from there. Note that in this case we also
+        rename the imports in the copied files.
+
+        For example, we copy python_virtualenv.py and process_utils.py and change the imports as in the example diff:
+
+        .. code-block:: diff
+
+            --- ./airflow/providers/apache/beam/common/utils/python_virtualenv.py
+            +++ ./airflow/providers/apache/beam/common/utils/python_virtualenv.py
+            @@ -21,7 +21,7 @@
+             \"\"\"
+            from typing import List, Optional
+
+            -from airflow.utils.process_utils import execute_in_subprocess
+            +from airflow.providers.apache.beam.common.utils.process_utils import execute_in_subprocess
+
+
+            def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool)
+
+        """
+
+        def apache_beam_package_filter(node: LN, capture: Capture, filename: Filename) -> bool:
+            return filename.startswith("./airflow/providers/apache/beam")
+
+        os.makedirs(
+            os.path.join(get_target_providers_package_folder("apache.beam"), "common", "utils"), exist_ok=True
+        )
+        copyfile(
+            os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"),
+            os.path.join(
+                get_target_providers_package_folder("apache.beam"), "common", "utils", "__init__.py"
+            ),
+        )
+        copyfile(
+            os.path.join(get_source_airflow_folder(), "airflow", "utils", "python_virtualenv.py"),
+            os.path.join(
+                get_target_providers_package_folder("apache.beam"), "common", "utils", "python_virtualenv.py"
+            ),
+        )
+        copyfile(
+            os.path.join(get_source_airflow_folder(), "airflow", "utils", "process_utils.py"),
+            os.path.join(
+                get_target_providers_package_folder("apache.beam"), "common", "utils", "process_utils.py"
+            ),
+        )
+        (
+            self.qry.select_module("airflow.utils.python_virtualenv")
+            .filter(callback=apache_beam_package_filter)
+            .rename("airflow.providers.apache.beam.common.utils.python_virtualenv")
+        )
+        (
+            self.qry.select_module("airflow.utils.process_utils")
+            .filter(callback=apache_beam_package_filter)
+            .rename("airflow.providers.apache.beam.common.utils.process_utils")
+        )
+
     def refactor_odbc_package(self):
         """
         Fixes to "odbc" providers package.
@@ -760,6 +821,7 @@ class RefactorBackportPackages:
         self.rename_deprecated_modules()
         self.refactor_amazon_package()
         self.refactor_google_package()
+        self.refactor_apache_beam_package()
         self.refactor_elasticsearch_package()
         self.refactor_odbc_package()
         self.remove_tags()
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 322a57f..3cfc39f 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -790,8 +790,10 @@ def convert_git_changes_to_table(
                 f"`{message_without_backticks}`" if markdown else f"``{message_without_backticks}``",
             )
         )
-    table = tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst")
     header = ""
+    if not table_data:
+        return header
+    table = tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst")
     if not markdown:
         header += f"\n\n{print_version}\n" + "." * len(print_version) + "\n\n"
         release_date = table_data[0][1]
diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst
new file mode 100644
index 0000000..30718f9
--- /dev/null
+++ b/docs/apache-airflow-providers-apache-beam/index.rst
@@ -0,0 +1,36 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+``apache-airflow-providers-apache-beam``
+========================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/apache/beam/index>
+    PyPI Repository <https://pypi.org/project/apache-airflow-providers-apache-beam/>
+    Example DAGs <https://github.com/apache/airflow/tree/master/airflow/providers/apache/beam/example_dags>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Guides
+
+    Operators <operators>
diff --git a/docs/apache-airflow-providers-apache-beam/operators.rst b/docs/apache-airflow-providers-apache-beam/operators.rst
new file mode 100644
index 0000000..3c1b2bd
--- /dev/null
+++ b/docs/apache-airflow-providers-apache-beam/operators.rst
@@ -0,0 +1,116 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Apache Beam Operators
+=====================
+
+`Apache Beam <https://beam.apache.org/>`__ is an open source, unified model for defining both batch and
+streaming data-parallel processing pipelines. Using one of the open source Beam SDKs, you build a program
+that defines the pipeline. The pipeline is then executed by one of Beam’s supported distributed processing
+back-ends, which include Apache Flink, Apache Spark, and Google Cloud Dataflow.
+
+
+.. _howto/operator:BeamRunPythonPipelineOperator:
+
+Run Python Pipelines in Apache Beam
+===================================
+
+The ``py_file`` argument must be specified for
+:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`
+as it contains the pipeline to be executed by Beam. The Python file can be located on GCS (Airflow
+will download it before execution) or on the local filesystem (provide the absolute path to it).
+
+The ``py_interpreter`` argument specifies the Python version to be used when executing the pipeline; the default
+is ``python3``. If your Airflow instance runs on Python 2, specify ``python2`` and ensure your ``py_file`` is
+written for Python 2. For best results, use Python 3.
+
+If the ``py_requirements`` argument is specified, a temporary Python virtual environment with the specified
+requirements will be created, and the pipeline will run within it.
+
+The ``py_system_site_packages`` argument specifies whether all the Python packages from your Airflow instance
+will be accessible within the virtual environment (if ``py_requirements`` is specified). Avoid enabling it
+unless the Dataflow job requires it.
+
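+The snippet below is a minimal, illustrative sketch (the bucket, requirement pin and task id are
+placeholders); the example DAGs referenced in the following sections are the canonical, tested usage.
+
+.. code-block:: python
+
+    from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
+
+    start_python_pipeline = BeamRunPythonPipelineOperator(
+        task_id="start_python_pipeline",
+        py_file="gs://my-bucket/my-pipeline.py",
+        py_interpreter="python3",
+        py_requirements=["apache-beam[gcp]==2.26.0"],
+        py_system_site_packages=False,
+        pipeline_options={"output": "gs://my-bucket/output"},
+    )
+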
+Python Pipelines with DirectRunner
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file]
+    :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file]
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file]
+    :end-before: [END howto_operator_start_python_direct_runner_pipeline_gcs_file]
+
+Python Pipelines with DataflowRunner
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file]
+    :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file]
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file]
+    :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file]
+
+.. _howto/operator:BeamRunJavaPipelineOperator:
+
+Run Java Pipelines in Apache Beam
+=================================
+
+For Java pipelines, the ``jar`` argument must be specified for
+:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`
+as it contains the pipeline to be executed by Apache Beam. The JAR can be located on GCS (Airflow
+will download it before execution) or on the local filesystem (provide the absolute path to it).
+
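+A minimal, illustrative sketch (the JAR location and job class are placeholders); see the example
+DAGs referenced below for complete pipelines.
+
+.. code-block:: python
+
+    from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
+
+    start_java_pipeline = BeamRunJavaPipelineOperator(
+        task_id="start_java_pipeline",
+        jar="gs://my-bucket/my-pipeline.jar",
+        job_class="org.example.MyBeamPipeline",
+        pipeline_options={"output": "gs://my-bucket/output"},
+    )
+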
+Java Pipelines with DirectRunner
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_java_direct_runner_pipeline]
+    :end-before: [END howto_operator_start_java_direct_runner_pipeline]
+
+Java Pipelines with DataflowRunner
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_start_java_dataflow_runner_pipeline]
+    :end-before: [END howto_operator_start_java_dataflow_runner_pipeline]
+
+Reference
+^^^^^^^^^
+
+For further information, look at:
+
+* `Apache Beam Documentation <https://beam.apache.org/documentation/>`__
+* `Google Cloud API Documentation <https://cloud.google.com/dataflow/docs/apis>`__
+* `Product Documentation <https://cloud.google.com/dataflow/docs/>`__
+* `Dataflow Monitoring Interface <https://cloud.google.com/dataflow/docs/guides/using-monitoring-intf/>`__
+* `Dataflow Command-line Interface <https://cloud.google.com/dataflow/docs/guides/using-command-line-intf/>`__
diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst
index b2549ae..5221beb 100644
--- a/docs/apache-airflow/extra-packages-ref.rst
+++ b/docs/apache-airflow/extra-packages-ref.rst
@@ -107,6 +107,8 @@ custom bash/python providers).
 +=====================+=====================================================+================================================+
 | apache.atlas        | ``pip install 'apache-airflow[apache.atlas]'``      | Apache Atlas                                   |
 +---------------------+-----------------------------------------------------+------------------------------------------------+
+| apache.beam         | ``pip install 'apache-airflow[apache.beam]'``       | Apache Beam operators & hooks                  |
++---------------------+-----------------------------------------------------+------------------------------------------------+
 | apache.cassandra    | ``pip install 'apache-airflow[apache.cassandra]'``  | Cassandra related operators & hooks            |
 +---------------------+-----------------------------------------------------+------------------------------------------------+
 | apache.druid        | ``pip install 'apache-airflow[apache.druid]'``      | Druid related operators & hooks                |
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index db4342a..f8f8f83 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -141,6 +141,7 @@ Fileshares
 Filesystem
 Firehose
 Firestore
+Flink
 FluentD
 Fokko
 Formaturas
@@ -325,6 +326,7 @@ Seki
 Sendgrid
 Siddharth
 SlackHook
+Spark
 SparkPi
 SparkR
 SparkSQL
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index 969fa29..9b951c7 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -95,7 +95,7 @@ function discover_all_provider_packages() {
     # Columns is to force it wider, so it doesn't wrap at 80 characters
     COLUMNS=180 airflow providers list
 
-    local expected_number_of_providers=62
+    local expected_number_of_providers=63
     local actual_number_of_providers
     actual_providers=$(airflow providers list --output yaml | grep package_name)
     actual_number_of_providers=$(wc -l <<<"$actual_providers")
diff --git a/setup.py b/setup.py
index 210b12f..50f6a2f 100644
--- a/setup.py
+++ b/setup.py
@@ -523,6 +523,7 @@ devel_hadoop = devel_minreq + hdfs + hive + kerberos + presto + webhdfs
 # Dict of all providers which are part of the Apache Airflow repository together with their requirements
 PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
     'amazon': amazon,
+    'apache.beam': apache_beam,
     'apache.cassandra': cassandra,
     'apache.druid': druid,
     'apache.hdfs': hdfs,
diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py
index 7d80c58..39ee588 100644
--- a/tests/core/test_providers_manager.py
+++ b/tests/core/test_providers_manager.py
@@ -22,6 +22,7 @@ from airflow.providers_manager import ProvidersManager
 
 ALL_PROVIDERS = [
     'apache-airflow-providers-amazon',
+    'apache-airflow-providers-apache-beam',
     'apache-airflow-providers-apache-cassandra',
     'apache-airflow-providers-apache-druid',
     'apache-airflow-providers-apache-hdfs',
diff --git a/tests/providers/apache/beam/__init__.py b/tests/providers/apache/beam/__init__.py
new file mode 100644
index 0000000..13a8339
--- /dev/null
+++ b/tests/providers/apache/beam/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/apache/beam/hooks/__init__.py b/tests/providers/apache/beam/hooks/__init__.py
new file mode 100644
index 0000000..13a8339
--- /dev/null
+++ b/tests/providers/apache/beam/hooks/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/apache/beam/hooks/test_beam.py b/tests/providers/apache/beam/hooks/test_beam.py
new file mode 100644
index 0000000..d0d713e
--- /dev/null
+++ b/tests/providers/apache/beam/hooks/test_beam.py
@@ -0,0 +1,271 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import copy
+import subprocess
+import unittest
+from unittest import mock
+from unittest.mock import MagicMock
+
+from parameterized import parameterized
+
+from airflow.exceptions import AirflowException
+from airflow.providers.apache.beam.hooks.beam import BeamCommandRunner, BeamHook, beam_options_to_args
+
+PY_FILE = 'apache_beam.examples.wordcount'
+JAR_FILE = 'unitest.jar'
+JOB_CLASS = 'com.example.UnitTest'
+PY_OPTIONS = ['-m']
+TEST_JOB_ID = 'test-job-id'
+
+DEFAULT_RUNNER = "DirectRunner"
+BEAM_STRING = 'airflow.providers.apache.beam.hooks.beam.{}'
+BEAM_VARIABLES_PY = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}}
+BEAM_VARIABLES_JAVA = {
+    'output': 'gs://test/output',
+    'labels': {'foo': 'bar'},
+}
+
+APACHE_BEAM_V_2_14_0_JAVA_SDK_LOG = f""""\
+Dataflow SDK version: 2.14.0
+Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run
+INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow\
+/jobsDetail/locations/europe-west3/jobs/{TEST_JOB_ID}?project=XXX
+Submitted job: {TEST_JOB_ID}
+Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run
+INFO: To cancel the job using the 'gcloud' tool, run:
+> gcloud dataflow jobs --project=XXX cancel --region=europe-west3 {TEST_JOB_ID}
+"""
+
+
+class TestBeamHook(unittest.TestCase):
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_python_pipeline(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        process_line_callback = MagicMock()
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            process_line_callback=process_line_callback,
+        )
+
+        expected_cmd = [
+            "python3",
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback)
+        wait_for_done.assert_called_once_with()
+
+    @parameterized.expand(
+        [
+            ('default_to_python3', 'python3'),
+            ('major_version_2', 'python2'),
+            ('major_version_3', 'python3'),
+            ('minor_version', 'python3.6'),
+        ]
+    )
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_python_pipeline_with_custom_interpreter(self, _, py_interpreter, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        process_line_callback = MagicMock()
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            py_interpreter=py_interpreter,
+            process_line_callback=process_line_callback,
+        )
+
+        expected_cmd = [
+            py_interpreter,
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback)
+        wait_for_done.assert_called_once_with()
+
+    @parameterized.expand(
+        [
+            (['foo-bar'], False),
+            (['foo-bar'], True),
+            ([], True),
+        ]
+    )
+    @mock.patch(BEAM_STRING.format('prepare_virtualenv'))
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_python_pipeline_with_non_empty_py_requirements_and_without_system_packages(
+        self, current_py_requirements, current_py_system_site_packages, mock_runner, mock_virtualenv
+    ):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        mock_virtualenv.return_value = '/dummy_dir/bin/python'
+        process_line_callback = MagicMock()
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            py_requirements=current_py_requirements,
+            py_system_site_packages=current_py_system_site_packages,
+            process_line_callback=process_line_callback,
+        )
+
+        expected_cmd = [
+            '/dummy_dir/bin/python',
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback)
+        wait_for_done.assert_called_once_with()
+        mock_virtualenv.assert_called_once_with(
+            venv_directory=mock.ANY,
+            python_bin="python3",
+            system_site_packages=current_py_system_site_packages,
+            requirements=current_py_requirements,
+        )
+
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_python_pipeline_with_empty_py_requirements_and_without_system_packages(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        process_line_callback = MagicMock()
+
+        with self.assertRaisesRegex(AirflowException, "Invalid method invocation."):
+            hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+                variables=copy.deepcopy(BEAM_VARIABLES_PY),
+                py_file=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_requirements=[],
+                process_line_callback=process_line_callback,
+            )
+
+        mock_runner.assert_not_called()
+        wait_for_done.assert_not_called()
+
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_java_pipeline(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        process_line_callback = MagicMock()
+
+        hook.start_java_pipeline(  # pylint: disable=no-value-for-parameter
+            jar=JAR_FILE,
+            variables=copy.deepcopy(BEAM_VARIABLES_JAVA),
+            process_line_callback=process_line_callback,
+        )
+
+        expected_cmd = [
+            'java',
+            '-jar',
+            JAR_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels={"foo":"bar"}',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback)
+        wait_for_done.assert_called_once_with()
+
+    @mock.patch(BEAM_STRING.format('BeamCommandRunner'))
+    def test_start_java_pipeline_with_job_class(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        process_line_callback = MagicMock()
+
+        hook.start_java_pipeline(  # pylint: disable=no-value-for-parameter
+            jar=JAR_FILE,
+            variables=copy.deepcopy(BEAM_VARIABLES_JAVA),
+            job_class=JOB_CLASS,
+            process_line_callback=process_line_callback,
+        )
+
+        expected_cmd = [
+            'java',
+            '-cp',
+            JAR_FILE,
+            JOB_CLASS,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels={"foo":"bar"}',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback)
+        wait_for_done.assert_called_once_with()
+
+
+class TestBeamRunner(unittest.TestCase):
+    @mock.patch('airflow.providers.apache.beam.hooks.beam.BeamCommandRunner.log')
+    @mock.patch('subprocess.Popen')
+    @mock.patch('select.select')
+    def test_beam_wait_for_done_logging(self, mock_select, mock_popen, mock_logging):
+        cmd = ['test', 'cmd']
+        mock_logging.info = MagicMock()
+        mock_logging.warning = MagicMock()
+        mock_proc = MagicMock()
+        mock_proc.stderr = MagicMock()
+        mock_proc.stderr.readlines = MagicMock(return_value=['test\n', 'error\n'])
+        mock_stderr_fd = MagicMock()
+        mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
+        mock_proc_poll = MagicMock()
+        mock_select.return_value = [[mock_stderr_fd]]
+
+        def poll_resp_error():
+            mock_proc.return_code = 1
+            return True
+
+        mock_proc_poll.side_effect = [None, poll_resp_error]
+        mock_proc.poll = mock_proc_poll
+        mock_popen.return_value = mock_proc
+        beam = BeamCommandRunner(cmd)
+        mock_logging.info.assert_called_once_with('Running command: %s', " ".join(cmd))
+        mock_popen.assert_called_once_with(
+            cmd,
+            shell=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+        )
+        self.assertRaises(Exception, beam.wait_for_done)
+
+
+class TestBeamOptionsToArgs(unittest.TestCase):
+    @parameterized.expand(
+        [
+            ({"key": "val"}, ["--key=val"]),
+            ({"key": None}, ["--key"]),
+            ({"key": True}, ["--key"]),
+            ({"key": False}, ["--key=False"]),
+            ({"key": ["a", "b", "c"]}, ["--key=a", "--key=b", "--key=c"]),
+        ]
+    )
+    def test_beam_options_to_args(self, options, expected_args):
+        args = beam_options_to_args(options)
+        assert args == expected_args
diff --git a/tests/providers/apache/beam/operators/__init__.py b/tests/providers/apache/beam/operators/__init__.py
new file mode 100644
index 0000000..13a8339
--- /dev/null
+++ b/tests/providers/apache/beam/operators/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/providers/apache/beam/operators/test_beam.py b/tests/providers/apache/beam/operators/test_beam.py
new file mode 100644
index 0000000..c31ff33
--- /dev/null
+++ b/tests/providers/apache/beam/operators/test_beam.py
@@ -0,0 +1,274 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+import unittest
+from unittest import mock
+
+from airflow.providers.apache.beam.operators.beam import (
+    BeamRunJavaPipelineOperator,
+    BeamRunPythonPipelineOperator,
+)
+from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
+from airflow.version import version
+
+TASK_ID = 'test-beam-operator'
+DEFAULT_RUNNER = "DirectRunner"
+JOB_NAME = 'test-dataflow-pipeline-name'
+JOB_ID = 'test-dataflow-pipeline-id'
+JAR_FILE = 'gs://my-bucket/example/test.jar'
+JOB_CLASS = 'com.test.NotMain'
+PY_FILE = 'gs://my-bucket/my-object.py'
+PY_INTERPRETER = 'python3'
+PY_OPTIONS = ['-m']
+DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = {
+    'project': 'test',
+    'stagingLocation': 'gs://test/staging',
+}
+ADDITIONAL_OPTIONS = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}}
+TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
+EXPECTED_ADDITIONAL_OPTIONS = {
+    'output': 'gs://test/output',
+    'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+}
+
+
+class TestBeamRunPythonPipelineOperator(unittest.TestCase):
+    def setUp(self):
+        self.operator = BeamRunPythonPipelineOperator(
+            task_id=TASK_ID,
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            default_pipeline_options=DEFAULT_OPTIONS_PYTHON,
+            pipeline_options=ADDITIONAL_OPTIONS,
+        )
+
+    def test_init(self):
+        """Test BeamRunPythonPipelineOperator instance is properly initialized."""
+        self.assertEqual(self.operator.task_id, TASK_ID)
+        self.assertEqual(self.operator.py_file, PY_FILE)
+        self.assertEqual(self.operator.runner, DEFAULT_RUNNER)
+        self.assertEqual(self.operator.py_options, PY_OPTIONS)
+        self.assertEqual(self.operator.py_interpreter, PY_INTERPRETER)
+        self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_PYTHON)
+        self.assertEqual(self.operator.pipeline_options, EXPECTED_ADDITIONAL_OPTIONS)
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_exec_direct_runner(self, gcs_hook, beam_hook_mock):
+        """Test BeamHook is created and the right args are passed to
+        start_python_workflow.
+        """
+        start_python_hook = beam_hook_mock.return_value.start_python_pipeline
+        gcs_provide_file = gcs_hook.return_value.provide_file
+        self.operator.execute(None)
+        beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
+        expected_options = {
+            'project': 'test',
+            'staging_location': 'gs://test/staging',
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+        }
+        gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
+        start_python_hook.assert_called_once_with(
+            variables=expected_options,
+            py_file=gcs_provide_file.return_value.__enter__.return_value.name,
+            py_options=PY_OPTIONS,
+            py_interpreter=PY_INTERPRETER,
+            py_requirements=None,
+            py_system_site_packages=False,
+            process_line_callback=None,
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock):
+        """Test DataflowHook is created and the right args are passed to
+        start_python_dataflow.
+        """
+        dataflow_config = DataflowConfiguration()
+        self.operator.runner = "DataflowRunner"
+        self.operator.dataflow_config = dataflow_config
+        gcs_provide_file = gcs_hook.return_value.provide_file
+        self.operator.execute(None)
+        job_name = dataflow_hook_mock.build_dataflow_job_name.return_value
+        dataflow_hook_mock.assert_called_once_with(
+            gcp_conn_id=dataflow_config.gcp_conn_id,
+            delegate_to=dataflow_config.delegate_to,
+            poll_sleep=dataflow_config.poll_sleep,
+            impersonation_chain=dataflow_config.impersonation_chain,
+            drain_pipeline=dataflow_config.drain_pipeline,
+            cancel_timeout=dataflow_config.cancel_timeout,
+            wait_until_finished=dataflow_config.wait_until_finished,
+        )
+        expected_options = {
+            'project': dataflow_hook_mock.return_value.project_id,
+            'job_name': job_name,
+            'staging_location': 'gs://test/staging',
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+            'region': 'us-central1',
+        }
+        gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
+        beam_hook_mock.return_value.start_python_pipeline.assert_called_once_with(
+            variables=expected_options,
+            py_file=gcs_provide_file.return_value.__enter__.return_value.name,
+            py_options=PY_OPTIONS,
+            py_interpreter=PY_INTERPRETER,
+            py_requirements=None,
+            py_system_site_packages=False,
+            process_line_callback=mock.ANY,
+        )
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=self.operator.dataflow_job_id,
+            job_name=job_name,
+            location='us-central1',
+            multiple_jobs=False,
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __):
+        self.operator.runner = "DataflowRunner"
+        dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job
+        self.operator.execute(None)
+        self.operator.dataflow_job_id = JOB_ID
+        self.operator.on_kill()
+        dataflow_cancel_job.assert_called_once_with(
+            job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_on_kill_direct_runner(self, _, dataflow_mock, __):
+        dataflow_cancel_job = dataflow_mock.return_value.cancel_job
+        self.operator.execute(None)
+        self.operator.on_kill()
+        dataflow_cancel_job.assert_not_called()
+
+
+class TestBeamRunJavaPipelineOperator(unittest.TestCase):
+    def setUp(self):
+        self.operator = BeamRunJavaPipelineOperator(
+            task_id=TASK_ID,
+            jar=JAR_FILE,
+            job_class=JOB_CLASS,
+            default_pipeline_options=DEFAULT_OPTIONS_JAVA,
+            pipeline_options=ADDITIONAL_OPTIONS,
+        )
+
+    def test_init(self):
+        """Test BeamRunJavaPipelineOperator instance is properly initialized."""
+        self.assertEqual(self.operator.task_id, TASK_ID)
+        self.assertEqual(self.operator.runner, DEFAULT_RUNNER)
+        self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_JAVA)
+        self.assertEqual(self.operator.job_class, JOB_CLASS)
+        self.assertEqual(self.operator.jar, JAR_FILE)
+        self.assertEqual(self.operator.pipeline_options, ADDITIONAL_OPTIONS)
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_exec_direct_runner(self, gcs_hook, beam_hook_mock):
+        """Test BeamHook is created and the right args are passed to
+        start_java_workflow.
+        """
+        start_java_hook = beam_hook_mock.return_value.start_java_pipeline
+        gcs_provide_file = gcs_hook.return_value.provide_file
+        self.operator.execute(None)
+
+        beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
+        gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
+        start_java_hook.assert_called_once_with(
+            variables={**DEFAULT_OPTIONS_JAVA, **ADDITIONAL_OPTIONS},
+            jar=gcs_provide_file.return_value.__enter__.return_value.name,
+            job_class=JOB_CLASS,
+            process_line_callback=None,
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock):
+        """Test DataflowHook is created and the right args are passed to
+        start_java_dataflow.
+        """
+        dataflow_config = DataflowConfiguration()
+        self.operator.runner = "DataflowRunner"
+        self.operator.dataflow_config = dataflow_config
+        gcs_provide_file = gcs_hook.return_value.provide_file
+        dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False
+        self.operator.execute(None)
+        job_name = dataflow_hook_mock.build_dataflow_job_name.return_value
+        self.assertEqual(job_name, self.operator._dataflow_job_name)
+        dataflow_hook_mock.assert_called_once_with(
+            gcp_conn_id=dataflow_config.gcp_conn_id,
+            delegate_to=dataflow_config.delegate_to,
+            poll_sleep=dataflow_config.poll_sleep,
+            impersonation_chain=dataflow_config.impersonation_chain,
+            drain_pipeline=dataflow_config.drain_pipeline,
+            cancel_timeout=dataflow_config.cancel_timeout,
+            wait_until_finished=dataflow_config.wait_until_finished,
+        )
+        gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
+
+        expected_options = {
+            'project': dataflow_hook_mock.return_value.project_id,
+            'jobName': job_name,
+            'stagingLocation': 'gs://test/staging',
+            'region': 'us-central1',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+            'output': 'gs://test/output',
+        }
+
+        beam_hook_mock.return_value.start_java_pipeline.assert_called_once_with(
+            variables=expected_options,
+            jar=gcs_provide_file.return_value.__enter__.return_value.name,
+            job_class=JOB_CLASS,
+            process_line_callback=mock.ANY,
+        )
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=self.operator.dataflow_job_id,
+            job_name=job_name,
+            location='us-central1',
+            multiple_jobs=dataflow_config.multiple_jobs,
+            project_id=dataflow_hook_mock.return_value.project_id,
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __):
+        self.operator.runner = "DataflowRunner"
+        dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False
+        dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job
+        self.operator.execute(None)
+        self.operator.dataflow_job_id = JOB_ID
+        self.operator.on_kill()
+        dataflow_cancel_job.assert_called_once_with(
+            job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id
+        )
+
+    @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
+    @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
+    def test_on_kill_direct_runner(self, _, dataflow_mock, __):
+        dataflow_cancel_job = dataflow_mock.return_value.cancel_job
+        self.operator.execute(None)
+        self.operator.on_kill()
+        dataflow_cancel_job.assert_not_called()
diff --git a/tests/providers/apache/beam/operators/test_beam_system.py b/tests/providers/apache/beam/operators/test_beam_system.py
new file mode 100644
index 0000000..0798f35
--- /dev/null
+++ b/tests/providers/apache/beam/operators/test_beam_system.py
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import os
+
+import pytest
+
+from tests.test_utils import AIRFLOW_MAIN_FOLDER
+from tests.test_utils.system_tests_class import SystemTest
+
+BEAM_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "providers", "apache", "beam", "example_dags")
+
+
+@pytest.mark.system("apache.beam")
+class BeamExampleDagsSystemTest(SystemTest):
+    def test_run_example_dag_beam_python(self):
+        self.run_dag('example_beam_native_python', BEAM_DAG_FOLDER)
+
+    def test_run_example_dag_beam_python_dataflow_async(self):
+        self.run_dag('example_beam_native_python_dataflow_async', BEAM_DAG_FOLDER)
+
+    def test_run_example_dag_beam_java_direct_runner(self):
+        self.run_dag('example_beam_native_java_direct_runner', BEAM_DAG_FOLDER)
+
+    def test_run_example_dag_beam_java_dataflow_runner(self):
+        self.run_dag('example_beam_native_java_dataflow_runner', BEAM_DAG_FOLDER)
+
+    def test_run_example_dag_beam_java_spark_runner(self):
+        self.run_dag('example_beam_native_java_spark_runner', BEAM_DAG_FOLDER)
+
+    def test_run_example_dag_beam_java_flink_runner(self):
+        self.run_dag('example_beam_native_java_flink_runner', BEAM_DAG_FOLDER)
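These system tests only run when the matching pytest marker is enabled. Assuming the standard Airflow system-test setup described in TESTING.rst (plus GCP credentials and the usual GCP_* environment variables for the Dataflow-backed cases), they would be invoked roughly like:

    pytest --system apache.beam tests/providers/apache/beam/operators/test_beam_system.py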
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py
index 5297b30..c0da030 100644
--- a/tests/providers/google/cloud/hooks/test_dataflow.py
+++ b/tests/providers/google/cloud/hooks/test_dataflow.py
@@ -30,16 +30,20 @@ import pytest
 from parameterized import parameterized
 
 from airflow.exceptions import AirflowException
+from airflow.providers.apache.beam.hooks.beam import BeamCommandRunner, BeamHook
 from airflow.providers.google.cloud.hooks.dataflow import (
     DEFAULT_DATAFLOW_LOCATION,
     DataflowHook,
     DataflowJobStatus,
     DataflowJobType,
     _DataflowJobsController,
-    _DataflowRunner,
     _fallback_to_project_id_from_variables,
+    process_line_and_extract_dataflow_job_id_callback,
 )
 
+DEFAULT_RUNNER = "DirectRunner"
+BEAM_STRING = 'airflow.providers.apache.beam.hooks.beam.{}'
+
 TASK_ID = 'test-dataflow-operator'
 JOB_NAME = 'test-dataflow-pipeline'
 MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48')
@@ -183,6 +187,7 @@ class TestDataflowHook(unittest.TestCase):
     def setUp(self):
         with mock.patch(BASE_STRING.format('GoogleBaseHook.__init__'), new=mock_init):
             self.dataflow_hook = DataflowHook(gcp_conn_id='test')
+            self.dataflow_hook.beam_hook = MagicMock()
 
     @mock.patch("airflow.providers.google.cloud.hooks.dataflow.DataflowHook._authorize")
     @mock.patch("airflow.providers.google.cloud.hooks.dataflow.build")
@@ -194,186 +199,229 @@ class TestDataflowHook(unittest.TestCase):
         assert mock_build.return_value == result
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
-    def test_start_python_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
+    def test_start_python_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid):
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=DATAFLOW_VARIABLES_PY,
-            dataflow=PY_FILE,
+        on_new_job_id_callback = MagicMock()
+        py_requirements = ["pandas", "numpy"]
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=DATAFLOW_VARIABLES_PY,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
+                py_requirements=py_requirements,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
+            py_interpreter=DEFAULT_PY_INTERPRETER,
             py_options=PY_OPTIONS,
+            py_requirements=py_requirements,
+            py_system_site_packages=False,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION
         )
-        expected_cmd = [
-            "python3",
-            '-m',
-            PY_FILE,
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_python_dataflow_with_custom_region_as_variable(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
-        variables['region'] = TEST_LOCATION
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=variables,
-            dataflow=PY_FILE,
+        on_new_job_id_callback = MagicMock()
+        py_requirements = ["pandas", "numpy"]
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        passed_variables["region"] = TEST_LOCATION
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=passed_variables,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
+                py_requirements=py_requirements,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = TEST_LOCATION
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
+            py_interpreter=DEFAULT_PY_INTERPRETER,
             py_options=PY_OPTIONS,
+            py_requirements=py_requirements,
+            py_system_site_packages=False,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION
         )
-        expected_cmd = [
-            "python3",
-            '-m',
-            PY_FILE,
-            f'--region={TEST_LOCATION}',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_python_dataflow_with_custom_region_as_parameter(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=DATAFLOW_VARIABLES_PY,
-            dataflow=PY_FILE,
+        on_new_job_id_callback = MagicMock()
+        py_requirements = ["pandas", "numpy"]
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=passed_variables,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
+                py_requirements=py_requirements,
+                on_new_job_id_callback=on_new_job_id_callback,
+                location=TEST_LOCATION,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = TEST_LOCATION
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
+            py_interpreter=DEFAULT_PY_INTERPRETER,
             py_options=PY_OPTIONS,
-            location=TEST_LOCATION,
+            py_requirements=py_requirements,
+            py_system_site_packages=False,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION
         )
-        expected_cmd = [
-            "python3",
-            '-m',
-            PY_FILE,
-            f'--region={TEST_LOCATION}',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_python_dataflow_with_multiple_extra_packages(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_PY)
-        variables['extra-package'] = ['a.whl', 'b.whl']
+        on_new_job_id_callback = MagicMock()
+        py_requirements = ["pandas", "numpy"]
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=variables,
-            dataflow=PY_FILE,
+        passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        passed_variables['extra-package'] = ['a.whl', 'b.whl']
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=passed_variables,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
+                py_requirements=py_requirements,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+        expected_variables['extra-package'] = ['a.whl', 'b.whl']
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
+            py_interpreter=DEFAULT_PY_INTERPRETER,
             py_options=PY_OPTIONS,
+            py_requirements=py_requirements,
+            py_system_site_packages=False,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION
         )
-        expected_cmd = [
-            "python3",
-            '-m',
-            PY_FILE,
-            '--extra-package=a.whl',
-            '--extra-package=b.whl',
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @parameterized.expand(
         [
-            ('default_to_python3', 'python3'),
-            ('major_version_2', 'python2'),
-            ('major_version_3', 'python3'),
-            ('minor_version', 'python3.6'),
+            ('python3',),
+            ('python2',),
+            ('python3',),
+            ('python3.6',),
         ]
     )
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_python_dataflow_with_custom_interpreter(
-        self,
-        name,
-        py_interpreter,
-        mock_conn,
-        mock_dataflow,
-        mock_dataflowjob,
-        mock_uuid,
+        self, py_interpreter, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
-        del name  # unused variable
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=DATAFLOW_VARIABLES_PY,
-            dataflow=PY_FILE,
-            py_options=PY_OPTIONS,
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=DATAFLOW_VARIABLES_PY,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=py_interpreter,
+                py_requirements=None,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
             py_interpreter=py_interpreter,
+            py_options=PY_OPTIONS,
+            py_requirements=None,
+            py_system_site_packages=False,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION
         )
-        expected_cmd = [
-            py_interpreter,
-            '-m',
-            PY_FILE,
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @parameterized.expand(
         [
@@ -382,225 +430,229 @@ class TestDataflowHook(unittest.TestCase):
             ([], True),
         ]
     )
-    @mock.patch(DATAFLOW_STRING.format('prepare_virtualenv'))
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_python_dataflow_with_non_empty_py_requirements_and_without_system_packages(
         self,
         current_py_requirements,
         current_py_system_site_packages,
-        mock_conn,
-        mock_dataflow,
-        mock_dataflowjob,
+        mock_callback_on_job_id,
+        mock_dataflow_wait_for_done,
         mock_uuid,
-        mock_virtualenv,
     ):
+        mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        mock_virtualenv.return_value = '/dummy_dir/bin/python'
-        self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME,
-            variables=DATAFLOW_VARIABLES_PY,
-            dataflow=PY_FILE,
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=DATAFLOW_VARIABLES_PY,
+                dataflow=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
+                py_requirements=current_py_requirements,
+                py_system_site_packages=current_py_system_site_packages,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
+        expected_variables["job_name"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_python_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            py_file=PY_FILE,
+            py_interpreter=DEFAULT_PY_INTERPRETER,
             py_options=PY_OPTIONS,
             py_requirements=current_py_requirements,
             py_system_site_packages=current_py_system_site_packages,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION
         )
-        expected_cmd = [
-            '/dummy_dir/bin/python',
-            '-m',
-            PY_FILE,
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--labels=foo=bar',
-            '--staging_location=gs://test/staging',
-            f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
     def test_start_python_dataflow_with_empty_py_requirements_and_without_system_packages(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_dataflow_wait_for_done, mock_uuid
     ):
+        self.dataflow_hook.beam_hook = BeamHook(runner="DataflowRunner")
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        with pytest.raises(AirflowException, match="Invalid method invocation."):
+        on_new_job_id_callback = MagicMock()
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"), self.assertRaisesRegex(
+            AirflowException, "Invalid method invocation."
+        ):
             self.dataflow_hook.start_python_dataflow(  # pylint: disable=no-value-for-parameter
                 job_name=JOB_NAME,
                 variables=DATAFLOW_VARIABLES_PY,
                 dataflow=PY_FILE,
                 py_options=PY_OPTIONS,
+                py_interpreter=DEFAULT_PY_INTERPRETER,
                 py_requirements=[],
+                on_new_job_id_callback=on_new_job_id_callback,
             )
 
+        mock_dataflow_wait_for_done.assert_not_called()
+
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
-    def test_start_java_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
+    def test_start_java_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid):
+        mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE
-        )
-        expected_cmd = [
-            'java',
-            '-jar',
-            JAR_FILE,
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--stagingLocation=gs://test/staging',
-            '--labels={"foo":"bar"}',
-            f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"])
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=DATAFLOW_VARIABLES_JAVA,
+                jar=JAR_FILE,
+                job_class=JOB_CLASS,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
+        expected_variables["jobName"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+        expected_variables["labels"] = '{"foo":"bar"}'
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_java_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            jar=JAR_FILE,
+            job_class=JOB_CLASS,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION, multiple_jobs=False
+        )
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_java_dataflow_with_multiple_values_in_variables(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
-        variables['mock-option'] = ['a.whl', 'b.whl']
-
-        self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME, variables=variables, jar=JAR_FILE
-        )
-        expected_cmd = [
-            'java',
-            '-jar',
-            JAR_FILE,
-            '--mock-option=a.whl',
-            '--mock-option=b.whl',
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--stagingLocation=gs://test/staging',
-            '--labels={"foo":"bar"}',
-            f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
+
+        passed_variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
+        passed_variables['mock-option'] = ['a.whl', 'b.whl']
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=passed_variables,
+                jar=JAR_FILE,
+                job_class=JOB_CLASS,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(passed_variables)
+        expected_variables["jobName"] = job_name
+        expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION
+        expected_variables["labels"] = '{"foo":"bar"}'
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_java_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            jar=JAR_FILE,
+            job_class=JOB_CLASS,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION, multiple_jobs=False
+        )
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_java_dataflow_with_custom_region_as_variable(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
-        variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
-        variables['region'] = TEST_LOCATION
-
-        self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME, variables=variables, jar=JAR_FILE
-        )
-        expected_cmd = [
-            'java',
-            '-jar',
-            JAR_FILE,
-            f'--region={TEST_LOCATION}',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--stagingLocation=gs://test/staging',
-            '--labels={"foo":"bar"}',
-            f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"])
+        passed_variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
+        passed_variables['region'] = TEST_LOCATION
+
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=passed_variables,
+                jar=JAR_FILE,
+                job_class=JOB_CLASS,
+                on_new_job_id_callback=on_new_job_id_callback,
+            )
+
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
+        expected_variables["jobName"] = job_name
+        expected_variables["region"] = TEST_LOCATION
+        expected_variables["labels"] = '{"foo":"bar"}'
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_java_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            jar=JAR_FILE,
+            job_class=JOB_CLASS,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION, multiple_jobs=False
+        )
 
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done'))
+    @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback'))
     def test_start_java_dataflow_with_custom_region_as_parameter(
-        self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid
+        self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid
     ):
+        mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline
         mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
+        on_new_job_id_callback = MagicMock()
+        job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
-        variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
-        variables['region'] = TEST_LOCATION
-
-        self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME, variables=variables, jar=JAR_FILE
-        )
-        expected_cmd = [
-            'java',
-            '-jar',
-            JAR_FILE,
-            f'--region={TEST_LOCATION}',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--stagingLocation=gs://test/staging',
-            '--labels={"foo":"bar"}',
-            f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"])
+        with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
+            self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
+                job_name=JOB_NAME,
+                variables=DATAFLOW_VARIABLES_JAVA,
+                jar=JAR_FILE,
+                job_class=JOB_CLASS,
+                on_new_job_id_callback=on_new_job_id_callback,
+                location=TEST_LOCATION,
+            )
 
-    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
-    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
-    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
-    def test_start_java_dataflow_with_job_class(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
-        mock_uuid.return_value = MOCK_UUID
-        mock_conn.return_value = None
-        dataflow_instance = mock_dataflow.return_value
-        dataflow_instance.wait_for_done.return_value = None
-        dataflowjob_instance = mock_dataflowjob.return_value
-        dataflowjob_instance.wait_for_done.return_value = None
-        self.dataflow_hook.start_java_dataflow(  # pylint: disable=no-value-for-parameter
-            job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE, job_class=JOB_CLASS
-        )
-        expected_cmd = [
-            'java',
-            '-cp',
-            JAR_FILE,
-            JOB_CLASS,
-            '--region=us-central1',
-            '--runner=DataflowRunner',
-            '--project=test',
-            '--stagingLocation=gs://test/staging',
-            '--labels={"foo":"bar"}',
-            f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}',
-        ]
-        assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd)
+        expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA)
+        expected_variables["jobName"] = job_name
+        expected_variables["region"] = TEST_LOCATION
+        expected_variables["labels"] = '{"foo":"bar"}'
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback)
+        mock_beam_start_java_pipeline.assert_called_once_with(
+            variables=expected_variables,
+            jar=JAR_FILE,
+            job_class=JOB_CLASS,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+
+        mock_dataflow_wait_for_done.assert_called_once_with(
+            job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION, multiple_jobs=False
+        )
 
     @parameterized.expand(
         [
@@ -616,17 +668,20 @@ class TestDataflowHook(unittest.TestCase):
     )
     @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'), return_value=MOCK_UUID)
     def test_valid_dataflow_job_name(self, expected_result, job_name, append_job_name, mock_uuid4):
-        job_name = self.dataflow_hook._build_dataflow_job_name(
+        job_name = self.dataflow_hook.build_dataflow_job_name(
             job_name=job_name, append_job_name=append_job_name
         )
 
-        assert expected_result == job_name
+        self.assertEqual(expected_result, job_name)
 
     @parameterized.expand([("1dfjob@",), ("dfjob@",), ("df^jo",)])
     def test_build_dataflow_job_name_with_invalid_value(self, job_name):
-        with pytest.raises(ValueError):
-            self.dataflow_hook._build_dataflow_job_name(job_name=job_name, append_job_name=False)
+        self.assertRaises(
+            ValueError, self.dataflow_hook.build_dataflow_job_name, job_name=job_name, append_job_name=False
+        )
 
     @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
     @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
     def test_get_job(self, mock_conn, mock_dataflowjob):
@@ -641,6 +696,7 @@ class TestDataflowHook(unittest.TestCase):
         )
         method_fetch_job_by_id.assert_called_once_with(TEST_JOB_ID)
 
     @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
     @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
     def test_fetch_job_metrics_by_id(self, mock_conn, mock_dataflowjob):
@@ -706,6 +762,34 @@ class TestDataflowHook(unittest.TestCase):
         )
         method_fetch_job_autoscaling_events_by_id.assert_called_once_with(TEST_JOB_ID)
 
+    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
+    @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn'))
+    def test_wait_for_done(self, mock_conn, mock_dataflowjob):
+        method_wait_for_done = mock_dataflowjob.return_value.wait_for_done
+
+        self.dataflow_hook.wait_for_done(
+            job_name="JOB_NAME",
+            project_id=TEST_PROJECT_ID,
+            job_id=TEST_JOB_ID,
+            location=TEST_LOCATION,
+            multiple_jobs=False,
+        )
+        mock_conn.assert_called_once()
+        mock_dataflowjob.assert_called_once_with(
+            dataflow=mock_conn.return_value,
+            project_number=TEST_PROJECT_ID,
+            name="JOB_NAME",
+            location=TEST_LOCATION,
+            poll_sleep=self.dataflow_hook.poll_sleep,
+            job_id=TEST_JOB_ID,
+            num_retries=self.dataflow_hook.num_retries,
+            multiple_jobs=False,
+            drain_pipeline=self.dataflow_hook.drain_pipeline,
+            cancel_timeout=self.dataflow_hook.cancel_timeout,
+            wait_until_finished=self.dataflow_hook.wait_until_finished,
+        )
+        method_wait_for_done.assert_called_once_with()
+
 
 class TestDataflowTemplateHook(unittest.TestCase):
     def setUp(self):
@@ -1691,13 +1775,32 @@ class TestDataflow(unittest.TestCase):
     def test_data_flow_valid_job_id(self, log):
         echos = ";".join([f"echo {shlex.quote(line)}" for line in log.split("\n")])
         cmd = ["bash", "-c", echos]
-        assert _DataflowRunner(cmd).wait_for_done() == TEST_JOB_ID
+        found_job_id = None
+
+        def callback(job_id):
+            nonlocal found_job_id
+            found_job_id = job_id
+
+        BeamCommandRunner(
+            cmd, process_line_callback=process_line_and_extract_dataflow_job_id_callback(callback)
+        ).wait_for_done()
+        self.assertEqual(found_job_id, TEST_JOB_ID)
 
     def test_data_flow_missing_job_id(self):
         cmd = ['echo', 'unit testing']
-        assert _DataflowRunner(cmd).wait_for_done() is None
+        found_job_id = None
+
+        def callback(job_id):
+            nonlocal found_job_id
+            found_job_id = job_id
+
+        BeamCommandRunner(
+            cmd, process_line_callback=process_line_and_extract_dataflow_job_id_callback(callback)
+        ).wait_for_done()
+
+        self.assertEqual(found_job_id, None)
 
-    @mock.patch('airflow.providers.google.cloud.hooks.dataflow._DataflowRunner.log')
+    @mock.patch('airflow.providers.apache.beam.hooks.beam.BeamCommandRunner.log')
     @mock.patch('subprocess.Popen')
     @mock.patch('select.select')
     def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen, mock_logging):
@@ -1718,7 +1821,6 @@ class TestDataflow(unittest.TestCase):
         mock_proc_poll.side_effect = [None, poll_resp_error]
         mock_proc.poll = mock_proc_poll
         mock_popen.return_value = mock_proc
-        dataflow = _DataflowRunner(['test', 'cmd'])
+        dataflow = BeamCommandRunner(['test', 'cmd'])
         mock_logging.info.assert_called_once_with('Running command: %s', 'test cmd')
-        with pytest.raises(Exception):
-            dataflow.wait_for_done()
+        self.assertRaises(Exception, dataflow.wait_for_done)
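The last three tests above exercise BeamCommandRunner together with process_line_and_extract_dataflow_job_id_callback: the runner streams the subprocess output line by line and hands each line to a callback that pulls the Dataflow job id out of it. Below is a minimal, self-contained sketch of that pattern written from scratch rather than with the provider classes; the regex and the echoed line are purely illustrative and do not reproduce Dataflow's real log format.

    import re
    import subprocess
    from typing import Callable, List, Optional

    def make_job_id_callback(on_job_id: Callable[[str], None]) -> Callable[[str], None]:
        # Purely illustrative pattern -- the real Dataflow log format differs.
        pattern = re.compile(r"job id: (?P<job_id>\S+)")

        def process_line(line: str) -> None:
            match = pattern.search(line)
            if match:
                on_job_id(match.group("job_id"))

        return process_line

    def run_and_scan(cmd: List[str], process_line: Callable[[str], None]) -> int:
        # Stream stdout/stderr and feed every line to the callback, as the command runner does.
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        assert proc.stdout is not None
        for line in proc.stdout:
            process_line(line)
        return proc.wait()

    found_job_id: Optional[str] = None

    def remember(job_id: str) -> None:
        global found_job_id
        found_job_id = job_id

    run_and_scan(["echo", "job id: 2015-02-09_11_53_23-123456789"], make_job_id_callback(remember))
    print(found_job_id)  # -> 2015-02-09_11_53_23-123456789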
diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/tests/providers/google/cloud/operators/test_dataflow.py
index c682a31..5d65dcb 100644
--- a/tests/providers/google/cloud/operators/test_dataflow.py
+++ b/tests/providers/google/cloud/operators/test_dataflow.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-
+import copy
 import unittest
 from copy import deepcopy
 from unittest import mock
@@ -115,35 +115,56 @@ class TestDataflowPythonOperator(unittest.TestCase):
         assert self.dataflow.dataflow_default_options == DEFAULT_OPTIONS_PYTHON
         assert self.dataflow.options == EXPECTED_ADDITIONAL_OPTIONS
 
+    @mock.patch(
+        'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback'
+    )
+    @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
-    def test_exec(self, gcs_hook, dataflow_mock):
+    def test_exec(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id):
         """Test DataflowHook is created and the right args are passed to
         start_python_workflow.
 
         """
-        start_python_hook = dataflow_mock.return_value.start_python_dataflow
+        start_python_mock = beam_hook_mock.return_value.start_python_pipeline
         gcs_provide_file = gcs_hook.return_value.provide_file
+        job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value
         self.dataflow.execute(None)
-        assert dataflow_mock.called
+        beam_hook_mock.assert_called_once_with(runner="DataflowRunner")
+        self.assertTrue(self.dataflow.py_file.startswith('/tmp/dataflow'))
+        gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY)
+        dataflow_hook_mock.assert_called_once_with(
+            gcp_conn_id="google_cloud_default",
+            delegate_to=mock.ANY,
+            poll_sleep=POLL_SLEEP,
+            impersonation_chain=None,
+            drain_pipeline=False,
+            cancel_timeout=mock.ANY,
+            wait_until_finished=None,
+        )
         expected_options = {
-            'project': 'test',
-            'staging_location': 'gs://test/staging',
+            "project": dataflow_hook_mock.return_value.project_id,
+            "staging_location": 'gs://test/staging',
+            "job_name": job_name,
+            "region": TEST_LOCATION,
             'output': 'gs://test/output',
-            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
         }
-        gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
-        start_python_hook.assert_called_once_with(
-            job_name=JOB_NAME,
+        start_python_mock.assert_called_once_with(
             variables=expected_options,
-            dataflow=mock.ANY,
+            py_file=gcs_provide_file.return_value.__enter__.return_value.name,
             py_options=PY_OPTIONS,
             py_interpreter=PY_INTERPRETER,
             py_requirements=None,
             py_system_site_packages=False,
-            on_new_job_id_callback=mock.ANY,
-            project_id=None,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=mock.ANY,
+            job_name=job_name,
             location=TEST_LOCATION,
+            multiple_jobs=False,
         )
         assert self.dataflow.py_file.startswith('/tmp/dataflow')
 
@@ -172,110 +193,182 @@ class TestDataflowJavaOperator(unittest.TestCase):
         assert self.dataflow.options == EXPECTED_ADDITIONAL_OPTIONS
         assert self.dataflow.check_if_running == CheckJobRunning.WaitForRun
 
+    @mock.patch(
+        'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback'
+    )
+    @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
-    def test_exec(self, gcs_hook, dataflow_mock):
+    def test_exec(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id):
         """Test DataflowHook is created and the right args are passed to
         start_java_workflow.
 
         """
-        start_java_hook = dataflow_mock.return_value.start_java_dataflow
+        start_java_mock = beam_hook_mock.return_value.start_java_pipeline
         gcs_provide_file = gcs_hook.return_value.provide_file
+        job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value
         self.dataflow.check_if_running = CheckJobRunning.IgnoreJob
+
         self.dataflow.execute(None)
-        assert dataflow_mock.called
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY)
         gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
-        start_java_hook.assert_called_once_with(
-            job_name=JOB_NAME,
-            variables=mock.ANY,
-            jar=mock.ANY,
+        expected_variables = {
+            'project': dataflow_hook_mock.return_value.project_id,
+            'stagingLocation': 'gs://test/staging',
+            'jobName': job_name,
+            'region': TEST_LOCATION,
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+        }
+
+        start_java_mock.assert_called_once_with(
+            variables=expected_variables,
+            jar=gcs_provide_file.return_value.__enter__.return_value.name,
             job_class=JOB_CLASS,
-            append_job_name=True,
-            multiple_jobs=None,
-            on_new_job_id_callback=mock.ANY,
-            project_id=None,
+            process_line_callback=mock_callback_on_job_id.return_value,
+        )
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=mock.ANY,
+            job_name=job_name,
             location=TEST_LOCATION,
+            multiple_jobs=None,
         )
 
+    @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
-    def test_check_job_running_exec(self, gcs_hook, dataflow_mock):
+    def test_check_job_running_exec(self, gcs_hook, dataflow_mock, beam_hook_mock):
         """Test DataflowHook is created and the right args are passed to
         start_java_workflow.
 
         """
         dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
         dataflow_running.return_value = True
-        start_java_hook = dataflow_mock.return_value.start_java_dataflow
+        start_java_hook = beam_hook_mock.return_value.start_java_pipeline
         gcs_provide_file = gcs_hook.return_value.provide_file
         self.dataflow.check_if_running = True
+
         self.dataflow.execute(None)
-        assert dataflow_mock.called
-        gcs_provide_file.assert_not_called()
+
+        self.assertTrue(dataflow_mock.called)
         start_java_hook.assert_not_called()
-        dataflow_running.assert_called_once_with(
-            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION
-        )
+        gcs_provide_file.assert_called_once()
+        variables = {
+            'project': dataflow_mock.return_value.project_id,
+            'stagingLocation': 'gs://test/staging',
+            'jobName': JOB_NAME,
+            'region': TEST_LOCATION,
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+        }
+        dataflow_running.assert_called_once_with(name=JOB_NAME, variables=variables)
 
+    @mock.patch(
+        'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback'
+    )
+    @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
-    def test_check_job_not_running_exec(self, gcs_hook, dataflow_mock):
+    def test_check_job_not_running_exec(
+        self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id
+    ):
         """Test DataflowHook is created and the right args are passed to
         start_java_workflow with option to check if job is running
-
         """
-        dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
+        is_job_dataflow_running_variables = None
+
+        def set_is_job_dataflow_running_variables(*args, **kwargs):
+            nonlocal is_job_dataflow_running_variables
+            is_job_dataflow_running_variables = copy.deepcopy(kwargs.get("variables"))
+
+        dataflow_running = dataflow_hook_mock.return_value.is_job_dataflow_running
+        dataflow_running.side_effect = set_is_job_dataflow_running_variables
         dataflow_running.return_value = False
-        start_java_hook = dataflow_mock.return_value.start_java_dataflow
+        start_java_mock = beam_hook_mock.return_value.start_java_pipeline
         gcs_provide_file = gcs_hook.return_value.provide_file
         self.dataflow.check_if_running = True
+
         self.dataflow.execute(None)
-        assert dataflow_mock.called
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY)
         gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
-        start_java_hook.assert_called_once_with(
-            job_name=JOB_NAME,
-            variables=mock.ANY,
-            jar=mock.ANY,
+        expected_variables = {
+            'project': dataflow_hook_mock.return_value.project_id,
+            'stagingLocation': 'gs://test/staging',
+            'jobName': JOB_NAME,
+            'region': TEST_LOCATION,
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
+        }
+        self.assertEqual(expected_variables, is_job_dataflow_running_variables)
+        job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value
+        expected_variables["jobName"] = job_name
+        start_java_mock.assert_called_once_with(
+            variables=expected_variables,
+            jar=gcs_provide_file.return_value.__enter__.return_value.name,
             job_class=JOB_CLASS,
-            append_job_name=True,
-            multiple_jobs=None,
-            on_new_job_id_callback=mock.ANY,
-            project_id=None,
-            location=TEST_LOCATION,
+            process_line_callback=mock_callback_on_job_id.return_value,
         )
-        dataflow_running.assert_called_once_with(
-            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=mock.ANY,
+            job_name=job_name,
+            location=TEST_LOCATION,
+            multiple_jobs=None,
         )
 
+    @mock.patch(
+        'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback'
+    )
+    @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
     @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
-    def test_check_multiple_job_exec(self, gcs_hook, dataflow_mock):
+    def test_check_multiple_job_exec(
+        self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id
+    ):
         """Test DataflowHook is created and the right args are passed to
-        start_java_workflow with option to check multiple jobs
-
+        start_java_workflow with option to check multiple jobs
         """
-        dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
+        is_job_dataflow_running_variables = None
+
+        def set_is_job_dataflow_running_variables(*args, **kwargs):
+            nonlocal is_job_dataflow_running_variables
+            is_job_dataflow_running_variables = copy.deepcopy(kwargs.get("variables"))
+
+        dataflow_running = dataflow_hook_mock.return_value.is_job_dataflow_running
+        dataflow_running.side_effect = set_is_job_dataflow_running_variables
         dataflow_running.return_value = False
-        start_java_hook = dataflow_mock.return_value.start_java_dataflow
+        start_java_mock = beam_hook_mock.return_value.start_java_pipeline
         gcs_provide_file = gcs_hook.return_value.provide_file
-        self.dataflow.multiple_jobs = True
         self.dataflow.check_if_running = True
+        self.dataflow.multiple_jobs = True
+
         self.dataflow.execute(None)
-        assert dataflow_mock.called
+
+        mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY)
         gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
-        start_java_hook.assert_called_once_with(
-            job_name=JOB_NAME,
-            variables=mock.ANY,
-            jar=mock.ANY,
+        expected_variables = {
+            'project': dataflow_hook_mock.return_value.project_id,
+            'stagingLocation': 'gs://test/staging',
+            'jobName': JOB_NAME,
+            'region': TEST_LOCATION,
+            'output': 'gs://test/output',
+            'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'},
+        }
+        self.assertEqual(expected_variables, is_job_dataflow_running_variables)
+        job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value
+        expected_variables["jobName"] = job_name
+        start_java_mock.assert_called_once_with(
+            variables=expected_variables,
+            jar=gcs_provide_file.return_value.__enter__.return_value.name,
             job_class=JOB_CLASS,
-            append_job_name=True,
-            multiple_jobs=True,
-            on_new_job_id_callback=mock.ANY,
-            project_id=None,
-            location=TEST_LOCATION,
+            process_line_callback=mock_callback_on_job_id.return_value,
         )
-        dataflow_running.assert_called_once_with(
-            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION
+        dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
+            job_id=mock.ANY,
+            job_name=job_name,
+            location=TEST_LOCATION,
+            multiple_jobs=True,
         )
 
 
diff --git a/tests/providers/google/cloud/operators/test_mlengine_utils.py b/tests/providers/google/cloud/operators/test_mlengine_utils.py
index 65b41b6..37a753a 100644
--- a/tests/providers/google/cloud/operators/test_mlengine_utils.py
+++ b/tests/providers/google/cloud/operators/test_mlengine_utils.py
@@ -106,9 +106,14 @@ class TestCreateEvaluateOps(unittest.TestCase):
             )
             assert success_message['predictionOutput'] == result
 
-        with patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') as mock_dataflow_hook:
-            hook_instance = mock_dataflow_hook.return_value
-            hook_instance.start_python_dataflow.return_value = None
+        with patch(
+            'airflow.providers.google.cloud.operators.dataflow.DataflowHook'
+        ) as mock_dataflow_hook, patch(
+            'airflow.providers.google.cloud.operators.dataflow.BeamHook'
+        ) as mock_beam_hook:
+            dataflow_hook_instance = mock_dataflow_hook.return_value
+            dataflow_hook_instance.start_python_dataflow.return_value = None
+            beam_hook_instance = mock_beam_hook.return_value
             summary.execute(None)
             mock_dataflow_hook.assert_called_once_with(
                 gcp_conn_id='google_cloud_default',
@@ -117,23 +122,28 @@ class TestCreateEvaluateOps(unittest.TestCase):
                 drain_pipeline=False,
                 cancel_timeout=600,
                 wait_until_finished=None,
+                impersonation_chain=None,
             )
-            hook_instance.start_python_dataflow.assert_called_once_with(
-                job_name='{{task.task_id}}',
+            mock_beam_hook.assert_called_once_with(runner="DataflowRunner")
+            beam_hook_instance.start_python_pipeline.assert_called_once_with(
                 variables={
                     'prediction_path': 'gs://legal-bucket/fake-output-path',
                     'labels': {'airflow-version': TEST_VERSION},
                     'metric_keys': 'err',
                     'metric_fn_encoded': self.metric_fn_encoded,
+                    'project': 'test-project',
+                    'region': 'us-central1',
+                    'job_name': mock.ANY,
                 },
-                dataflow=mock.ANY,
+                py_file=mock.ANY,
                 py_options=[],
-                py_requirements=['apache-beam[gcp]>=2.14.0'],
                 py_interpreter='python3',
+                py_requirements=['apache-beam[gcp]>=2.14.0'],
                 py_system_site_packages=False,
-                on_new_job_id_callback=ANY,
-                project_id='test-project',
-                location='us-central1',
+                process_line_callback=mock.ANY,
+            )
+            dataflow_hook_instance.wait_for_done.assert_called_once_with(
+                job_name=mock.ANY, location='us-central1', job_id=mock.ANY, multiple_jobs=False
             )
 
         with patch('airflow.providers.google.cloud.utils.mlengine_operator_utils.GCSHook') as mock_gcs_hook:
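
The operator tests in this change capture the keyword arguments passed to a mocked hook by installing a side_effect that deep-copies them before asserting on the expected variables. A minimal, self-contained sketch of that pattern is below; the hook and method names are made up for illustration and are not Airflow code:

    import copy
    from unittest import mock


    def test_capture_kwargs_passed_to_mock():
        captured_variables = None

        def capture(*args, **kwargs):
            # Record a deep copy so later mutation by the caller is not reflected here.
            nonlocal captured_variables
            captured_variables = copy.deepcopy(kwargs.get("variables"))
            # Returning mock.DEFAULT lets the mock fall back to its configured return_value.
            return mock.DEFAULT

        hook = mock.MagicMock()
        hook.is_job_running.side_effect = capture
        hook.is_job_running.return_value = False

        assert hook.is_job_running(variables={"jobName": "example"}) is False
        assert captured_variables == {"jobName": "example"}

Returning mock.DEFAULT from the side_effect is what allows the call to still evaluate to False for the code under test while the kwargs are recorded for later assertions.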


[airflow] 08/41: Add better description and guidance in case of sqlite version mismatch (#14209)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f1ae4a4353ba7b4ebd8173b4bc05365b4de94275
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sat Feb 13 18:46:37 2021 +0100

    Add better description and guidance in case of sqlite version mismatch (#14209)
    
    Closes: #14208
    (cherry picked from commit 4c90712f192dd552d1791712a49bcdc810ebe82f)
---
 airflow/configuration.py                      |  6 +++-
 docs/apache-airflow/howto/set-up-database.rst | 45 +++++++++++++++++++++++++++
 2 files changed, 50 insertions(+), 1 deletion(-)

diff --git a/airflow/configuration.py b/airflow/configuration.py
index 5b765de..5995433 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -39,6 +39,7 @@ from cryptography.fernet import Fernet
 
 from airflow.exceptions import AirflowConfigException
 from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend
+from airflow.utils.docs import get_docs_url
 from airflow.utils.module_loading import import_string
 
 log = logging.getLogger(__name__)
@@ -243,7 +244,10 @@ class AirflowConfigParser(ConfigParser):  # pylint: disable=too-many-ancestors
             # Some of the features in storing rendered fields require sqlite version >= 3.15.0
             min_sqlite_version = '3.15.0'
             if StrictVersion(sqlite3.sqlite_version) < StrictVersion(min_sqlite_version):
-                raise AirflowConfigException(f"error: cannot use sqlite version < {min_sqlite_version}")
+                raise AirflowConfigException(
+                    f"error: sqlite C library version too old (< {min_sqlite_version}). "
+                    f"See {get_docs_url('howto/set-up-database.rst#setting-up-a-sqlite-database')}"
+                )
 
         if self.has_option('core', 'mp_start_method'):
             mp_start_method = self.get('core', 'mp_start_method')
diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst
index 3afdff1..0d4f578 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -59,6 +59,51 @@ the example below.
 
 The exact format description is described in the SQLAlchemy documentation, see `Database Urls <https://docs.sqlalchemy.org/en/14/core/engines.html>`__. We will also show you some examples below.
 
+Setting up a SQLite Database
+----------------------------
+
+SQLite database can be used to run Airflow for development purposes as it does not require any database server
+(the database is stored in a local file). There are a few limitations when using the SQLite database (for example
+it only works with the Sequential Executor) and it should NEVER be used in production.
+
+Airflow 2.0+ requires a minimum sqlite3 version of 3.15.0. Some older systems have an earlier version of
+SQLite installed by default, and for those systems you need to manually upgrade SQLite to a version newer
+than 3.15.0. Note that this is not a ``python library`` version; it is the system-level SQLite library that
+needs to be upgraded. There are different ways SQLite might be installed; you can find some information
+about that at the `official website of SQLite
+<https://www.sqlite.org/index.html>`_ and in the documentation specific to the distribution of your Operating
+System.
+
+**Troubleshooting**
+
+Sometimes, even if you upgrade SQLite to a higher version and your local Python reports that higher version,
+the Python interpreter used by Airflow might still load the older library found via the
+``LD_LIBRARY_PATH`` set for the Python interpreter that is used to start Airflow.
+
+You can verify which version is used by the interpreter by running this check:
+
+.. code-block:: bash
+
+    root@b8a8e73caa2c:/opt/airflow# python
+    Python 3.6.12 (default, Nov 25 2020, 03:59:00)
+    [GCC 8.3.0] on linux
+    Type "help", "copyright", "credits" or "license" for more information.
+    >>> import sqlite3
+    >>> sqlite3.sqlite_version
+    '3.27.2'
+    >>>
+
+But be aware that setting environment variables for your Airflow deployment might change which SQLite
+library is found first, so you might want to make sure that the "high-enough" version of SQLite is the only
+version installed in your system.
+
+An example URI for the sqlite database:
+
+.. code-block:: text
+
+    sqlite:////home/airflow/airflow.db
+
+
 Setting up a MySQL Database
 ---------------------------
 


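As a quick way to confirm the requirement described in the documentation change above, the same version comparison that airflow/configuration.py performs can be run by hand. This is only a sketch and assumes it is executed with the interpreter that will start Airflow:

    import sqlite3
    from distutils.version import StrictVersion

    MIN_SQLITE_VERSION = "3.15.0"  # minimum required by Airflow 2.0+, as documented above

    if StrictVersion(sqlite3.sqlite_version) < StrictVersion(MIN_SQLITE_VERSION):
        raise SystemExit(
            f"sqlite C library too old: {sqlite3.sqlite_version} < {MIN_SQLITE_VERSION}"
        )
    print(f"OK: sqlite C library version {sqlite3.sqlite_version}")
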
[airflow] 31/41: Support google-cloud-logging >=2.0.0 (#13801)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit be04073226a99a7eb57f2eeac929263493105815
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Wed Feb 3 04:16:50 2021 +0100

    Support google-cloud-logging >=2.0.0 (#13801)
    
    (cherry picked from commit 0e8c77b93a5ca5ecfdcd1c4bd91f54846fc15d57)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   1 +
 .../google/cloud/log/stackdriver_task_handler.py   |  72 +++++--
 setup.py                                           |   2 +-
 .../cloud/log/test_stackdriver_task_handler.py     | 225 +++++++++++++--------
 4 files changed, 200 insertions(+), 100 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index 9cf9853..a363051 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -34,6 +34,7 @@ Details are covered in the UPDATING.md files for each library, but there are som
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
 | [``google-cloud-dataproc``](https://pypi.org/project/google-cloud-dataproc/) | ``>=1.0.1,<2.0.0`` | ``>=2.2.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-dataproc/blob/master/UPGRADING.md) |
 | [``google-cloud-kms``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
+| [``google-cloud-logging``](https://pypi.org/project/google-cloud-logging/) | ``>=1.14.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-logging/blob/master/UPGRADING.md) |
 | [``google-cloud-monitoring``](https://pypi.org/project/google-cloud-monitoring/) | ``>=0.34.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-monitoring/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py
index be75fcd..5479185 100644
--- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py
+++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py
@@ -21,9 +21,12 @@ from urllib.parse import urlencode
 
 from cached_property import cached_property
 from google.api_core.gapic_v1.client_info import ClientInfo
+from google.auth.credentials import Credentials
 from google.cloud import logging as gcp_logging
+from google.cloud.logging import Resource
 from google.cloud.logging.handlers.transports import BackgroundThreadTransport, Transport
-from google.cloud.logging.resource import Resource
+from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
+from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse
 
 from airflow import version
 from airflow.models import TaskInstance
@@ -99,13 +102,19 @@ class StackdriverTaskHandler(logging.Handler):
         self.resource: Resource = resource
         self.labels: Optional[Dict[str, str]] = labels
         self.task_instance_labels: Optional[Dict[str, str]] = {}
+        self.task_instance_hostname = 'default-hostname'
 
     @cached_property
-    def _client(self) -> gcp_logging.Client:
-        """Google Cloud Library API client"""
+    def _credentials_and_project(self) -> Tuple[Credentials, str]:
         credentials, project = get_credentials_and_project_id(
             key_path=self.gcp_key_path, scopes=self.scopes, disable_logging=True
         )
+        return credentials, project
+
+    @property
+    def _client(self) -> gcp_logging.Client:
+        """The Cloud Library API client"""
+        credentials, project = self._credentials_and_project
         client = gcp_logging.Client(
             credentials=credentials,
             project=project,
@@ -113,6 +122,16 @@ class StackdriverTaskHandler(logging.Handler):
         )
         return client
 
+    @property
+    def _logging_service_client(self) -> LoggingServiceV2Client:
+        """The Cloud logging service v2 client."""
+        credentials, _ = self._credentials_and_project
+        client = LoggingServiceV2Client(
+            credentials=credentials,
+            client_info=ClientInfo(client_library_version='airflow_v' + version.version),
+        )
+        return client
+
     @cached_property
     def _transport(self) -> Transport:
         """Object responsible for sending data to Stackdriver"""
@@ -146,10 +165,11 @@ class StackdriverTaskHandler(logging.Handler):
         :type task_instance:  :class:`airflow.models.TaskInstance`
         """
         self.task_instance_labels = self._task_instance_to_labels(task_instance)
+        self.task_instance_hostname = task_instance.hostname
 
     def read(
         self, task_instance: TaskInstance, try_number: Optional[int] = None, metadata: Optional[Dict] = None
-    ) -> Tuple[List[str], List[Dict]]:
+    ) -> Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]]:
         """
         Read logs of given task instance from Stackdriver logging.
 
@@ -160,12 +180,14 @@ class StackdriverTaskHandler(logging.Handler):
         :type try_number: Optional[int]
         :param metadata: log metadata. It is used for streaming log reading and auto-tailing.
         :type metadata: Dict
-        :return: a tuple of list of logs and list of metadata
-        :rtype: Tuple[List[str], List[Dict]]
+        :return: a tuple of (
+            a list of one-element tuples, each holding a (hostname, logs) tuple,
+            and a list of metadata)
+        :rtype: Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]]
         """
         if try_number is not None and try_number < 1:
-            logs = [f"Error fetching the logs. Try number {try_number} is invalid."]
-            return logs, [{"end_of_log": "true"}]
+            logs = f"Error fetching the logs. Try number {try_number} is invalid."
+            return [((self.task_instance_hostname, logs),)], [{"end_of_log": "true"}]
 
         if not metadata:
             metadata = {}
@@ -188,7 +210,7 @@ class StackdriverTaskHandler(logging.Handler):
         if next_page_token:
             new_metadata['next_page_token'] = next_page_token
 
-        return [messages], [new_metadata]
+        return [((self.task_instance_hostname, messages),)], [new_metadata]
 
     def _prepare_log_filter(self, ti_labels: Dict[str, str]) -> str:
         """
@@ -210,9 +232,10 @@ class StackdriverTaskHandler(logging.Handler):
             escaped_value = value.replace("\\", "\\\\").replace('"', '\\"')
             return f'"{escaped_value}"'
 
+        _, project = self._credentials_and_project
         log_filters = [
             f'resource.type={escale_label_value(self.resource.type)}',
-            f'logName="projects/{self._client.project}/logs/{self.name}"',
+            f'logName="projects/{project}/logs/{self.name}"',
         ]
 
         for key, value in self.resource.labels.items():
@@ -252,6 +275,8 @@ class StackdriverTaskHandler(logging.Handler):
                     log_filter=log_filter, page_token=next_page_token
                 )
                 messages.append(new_messages)
+                if not messages:
+                    break
 
             end_of_log = True
             next_page_token = None
@@ -271,15 +296,21 @@ class StackdriverTaskHandler(logging.Handler):
         :return: Downloaded logs and next page token
         :rtype: Tuple[str, str]
         """
-        entries = self._client.list_entries(filter_=log_filter, page_token=page_token)
-        page = next(entries.pages)
-        next_page_token = entries.next_page_token
+        _, project = self._credentials_and_project
+        request = ListLogEntriesRequest(
+            resource_names=[f'projects/{project}'],
+            filter=log_filter,
+            page_token=page_token,
+            order_by='timestamp asc',
+            page_size=1000,
+        )
+        response = self._logging_service_client.list_log_entries(request=request)
+        page: ListLogEntriesResponse = next(response.pages)
         messages = []
-        for entry in page:
-            if "message" in entry.payload:
-                messages.append(entry.payload["message"])
-
-        return "\n".join(messages), next_page_token
+        for entry in page.entries:
+            if "message" in entry.json_payload:
+                messages.append(entry.json_payload["message"])
+        return "\n".join(messages), page.next_page_token
 
     @classmethod
     def _task_instance_to_labels(cls, ti: TaskInstance) -> Dict[str, str]:
@@ -315,7 +346,7 @@ class StackdriverTaskHandler(logging.Handler):
         :return: URL to the external log collection service
         :rtype: str
         """
-        project_id = self._client.project
+        _, project_id = self._credentials_and_project
 
         ti_labels = self._task_instance_to_labels(task_instance)
         ti_labels[self.LABEL_TRY_NUMBER] = str(try_number)
@@ -331,3 +362,6 @@ class StackdriverTaskHandler(logging.Handler):
 
         url = f"{self.LOG_VIEWER_BASE_URL}?{urlencode(url_query_string)}"
         return url
+
+    def close(self) -> None:
+        self._transport.flush()
diff --git a/setup.py b/setup.py
index fa1e73a..7beb684 100644
--- a/setup.py
+++ b/setup.py
@@ -292,7 +292,7 @@ google = [
     'google-cloud-dlp>=0.11.0,<2.0.0',
     'google-cloud-kms>=2.0.0,<3.0.0',
     'google-cloud-language>=1.1.1,<2.0.0',
-    'google-cloud-logging>=1.14.0,<2.0.0',
+    'google-cloud-logging>=2.1.1,<3.0.0',
     'google-cloud-memcache>=0.2.0',
     'google-cloud-monitoring>=2.0.0,<3.0.0',
     'google-cloud-os-login>=2.0.0,<3.0.0',
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
index 4159e9e..b4dbf69 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
+++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
@@ -21,7 +21,8 @@ from datetime import datetime
 from unittest import mock
 from urllib.parse import parse_qs, urlparse
 
-from google.cloud.logging.resource import Resource
+from google.cloud.logging import Resource
+from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse, LogEntry
 
 from airflow.models import TaskInstance
 from airflow.models.dag import DAG
@@ -30,15 +31,27 @@ from airflow.providers.google.cloud.log.stackdriver_task_handler import Stackdri
 from airflow.utils.state import State
 
 
-def _create_list_response(messages, token):
-    page = [mock.MagicMock(payload={"message": message}) for message in messages]
-    return mock.MagicMock(pages=(n for n in [page]), next_page_token=token)
+def _create_list_log_entries_response_mock(messages, token):
+    return ListLogEntriesResponse(
+        entries=[LogEntry(json_payload={"message": message}) for message in messages], next_page_token=token
+    )
+
+
+def _remove_stackdriver_handlers():
+    for handler_ref in reversed(logging._handlerList[:]):
+        handler = handler_ref()
+        if not isinstance(handler, StackdriverTaskHandler):
+            continue
+        logging._removeHandlerRef(handler_ref)
+        del handler
 
 
 class TestStackdriverLoggingHandlerStandalone(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client')
     def test_should_pass_message_to_client(self, mock_client, mock_get_creds_and_project_id):
+        self.addCleanup(_remove_stackdriver_handlers)
+
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
 
         transport_type = mock.MagicMock()
@@ -69,6 +82,7 @@ class TestStackdriverLoggingHandlerTask(unittest.TestCase):
         self.ti.try_number = 1
         self.ti.state = State.RUNNING
         self.addCleanup(self.dag.clear)
+        self.addCleanup(_remove_stackdriver_handlers)
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client')
@@ -118,107 +132,153 @@ class TestStackdriverLoggingHandlerTask(unittest.TestCase):
         )
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch(
-        'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client',
-        **{'return_value.project': 'asf-project'},  # type: ignore
-    )
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_for_all_try(self, mock_client, mock_get_creds_and_project_id):
-        mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None)
+        mock_client.return_value.list_log_entries.return_value.pages = iter(
+            [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)]
+        )
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
 
         logs, metadata = self.stackdriver_task_handler.read(self.ti)
-        mock_client.return_value.list_entries.assert_called_once_with(
-            filter_='resource.type="global"\n'
-            'logName="projects/asf-project/logs/airflow"\n'
-            'labels.task_id="task_for_testing_file_log_handler"\n'
-            'labels.dag_id="dag_for_testing_file_task_handler"\n'
-            'labels.execution_date="2016-01-01T00:00:00+00:00"',
-            page_token=None,
+        mock_client.return_value.list_log_entries.assert_called_once_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    'resource.type="global"\n'
+                    'logName="projects/project_id/logs/airflow"\n'
+                    'labels.task_id="task_for_testing_file_log_handler"\n'
+                    'labels.dag_id="dag_for_testing_file_task_handler"\n'
+                    'labels.execution_date="2016-01-01T00:00:00+00:00"'
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token=None,
+            )
         )
-        assert ['MSG1\nMSG2'] == logs
+        assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs
         assert [{'end_of_log': True}] == metadata
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch(
-        'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client',
-        **{'return_value.project': 'asf-project'},  # type: ignore
-    )
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_for_task_with_quote(self, mock_client, mock_get_creds_and_project_id):
-        mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None)
+        mock_client.return_value.list_log_entries.return_value.pages = iter(
+            [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)]
+        )
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
         self.ti.task_id = "K\"OT"
         logs, metadata = self.stackdriver_task_handler.read(self.ti)
-        mock_client.return_value.list_entries.assert_called_once_with(
-            filter_='resource.type="global"\n'
-            'logName="projects/asf-project/logs/airflow"\n'
-            'labels.task_id="K\\"OT"\n'
-            'labels.dag_id="dag_for_testing_file_task_handler"\n'
-            'labels.execution_date="2016-01-01T00:00:00+00:00"',
-            page_token=None,
+        mock_client.return_value.list_log_entries.assert_called_once_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    'resource.type="global"\n'
+                    'logName="projects/project_id/logs/airflow"\n'
+                    'labels.task_id="K\\"OT"\n'
+                    'labels.dag_id="dag_for_testing_file_task_handler"\n'
+                    'labels.execution_date="2016-01-01T00:00:00+00:00"'
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token=None,
+            )
         )
-        assert ['MSG1\nMSG2'] == logs
+        assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs
         assert [{'end_of_log': True}] == metadata
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch(
-        'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client',
-        **{'return_value.project': 'asf-project'},  # type: ignore
-    )
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_for_single_try(self, mock_client, mock_get_creds_and_project_id):
-        mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None)
+        mock_client.return_value.list_log_entries.return_value.pages = iter(
+            [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)]
+        )
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
 
         logs, metadata = self.stackdriver_task_handler.read(self.ti, 3)
-        mock_client.return_value.list_entries.assert_called_once_with(
-            filter_='resource.type="global"\n'
-            'logName="projects/asf-project/logs/airflow"\n'
-            'labels.task_id="task_for_testing_file_log_handler"\n'
-            'labels.dag_id="dag_for_testing_file_task_handler"\n'
-            'labels.execution_date="2016-01-01T00:00:00+00:00"\n'
-            'labels.try_number="3"',
-            page_token=None,
+        mock_client.return_value.list_log_entries.assert_called_once_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    'resource.type="global"\n'
+                    'logName="projects/project_id/logs/airflow"\n'
+                    'labels.task_id="task_for_testing_file_log_handler"\n'
+                    'labels.dag_id="dag_for_testing_file_task_handler"\n'
+                    'labels.execution_date="2016-01-01T00:00:00+00:00"\n'
+                    'labels.try_number="3"'
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token=None,
+            )
         )
-        assert ['MSG1\nMSG2'] == logs
+        assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs
         assert [{'end_of_log': True}] == metadata
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client')
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_with_pagination(self, mock_client, mock_get_creds_and_project_id):
-        mock_client.return_value.list_entries.side_effect = [
-            _create_list_response(["MSG1", "MSG2"], "TOKEN1"),
-            _create_list_response(["MSG3", "MSG4"], None),
+        mock_client.return_value.list_log_entries.side_effect = [
+            mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG1", "MSG2"], "TOKEN1")])),
+            mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG3", "MSG4"], None)])),
         ]
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
         logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3)
-        mock_client.return_value.list_entries.assert_called_once_with(filter_=mock.ANY, page_token=None)
-        assert ['MSG1\nMSG2'] == logs
+        mock_client.return_value.list_log_entries.assert_called_once_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    '''resource.type="global"
+logName="projects/project_id/logs/airflow"
+labels.task_id="task_for_testing_file_log_handler"
+labels.dag_id="dag_for_testing_file_task_handler"
+labels.execution_date="2016-01-01T00:00:00+00:00"
+labels.try_number="3"'''
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token=None,
+            )
+        )
+        assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs
         assert [{'end_of_log': False, 'next_page_token': 'TOKEN1'}] == metadata1
 
-        mock_client.return_value.list_entries.return_value.next_page_token = None
+        mock_client.return_value.list_log_entries.return_value.next_page_token = None
         logs, metadata2 = self.stackdriver_task_handler.read(self.ti, 3, metadata1[0])
-        mock_client.return_value.list_entries.assert_called_with(filter_=mock.ANY, page_token="TOKEN1")
-        assert ['MSG3\nMSG4'] == logs
+
+        mock_client.return_value.list_log_entries.assert_called_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    'resource.type="global"\n'
+                    'logName="projects/project_id/logs/airflow"\n'
+                    'labels.task_id="task_for_testing_file_log_handler"\n'
+                    'labels.dag_id="dag_for_testing_file_task_handler"\n'
+                    'labels.execution_date="2016-01-01T00:00:00+00:00"\n'
+                    'labels.try_number="3"'
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token="TOKEN1",
+            )
+        )
+        assert [(('default-hostname', 'MSG3\nMSG4'),)] == logs
         assert [{'end_of_log': True}] == metadata2
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client')
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_with_download(self, mock_client, mock_get_creds_and_project_id):
-        mock_client.return_value.list_entries.side_effect = [
-            _create_list_response(["MSG1", "MSG2"], "TOKEN1"),
-            _create_list_response(["MSG3", "MSG4"], None),
+        mock_client.return_value.list_log_entries.side_effect = [
+            mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG1", "MSG2"], "TOKEN1")])),
+            mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG3", "MSG4"], None)])),
         ]
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
 
         logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3, {'download_logs': True})
 
-        assert ['MSG1\nMSG2\nMSG3\nMSG4'] == logs
+        assert [(('default-hostname', 'MSG1\nMSG2\nMSG3\nMSG4'),)] == logs
         assert [{'end_of_log': True}] == metadata1
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch(
-        'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client',
-        **{'return_value.project': 'asf-project'},  # type: ignore
-    )
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_creds_and_project_id):
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
         resource = Resource(
@@ -226,31 +286,37 @@ class TestStackdriverLoggingHandlerTask(unittest.TestCase):
             labels={
                 "environment.name": 'test-instancce',
                 "location": 'europpe-west-3',
-                "project_id": "asf-project",
+                "project_id": "project_id",
             },
         )
         self.stackdriver_task_handler = StackdriverTaskHandler(
             transport=self.transport_mock, resource=resource
         )
 
-        entry = mock.MagicMock(payload={"message": "TEXT"})
-        page = [entry, entry]
-        mock_client.return_value.list_entries.return_value.pages = (n for n in [page])
-        mock_client.return_value.list_entries.return_value.next_page_token = None
+        entry = mock.MagicMock(json_payload={"message": "TEXT"})
+        page = mock.MagicMock(entries=[entry, entry], next_page_token=None)
+        mock_client.return_value.list_log_entries.return_value.pages = (n for n in [page])
 
         logs, metadata = self.stackdriver_task_handler.read(self.ti)
-        mock_client.return_value.list_entries.assert_called_once_with(
-            filter_='resource.type="cloud_composer_environment"\n'
-            'logName="projects/asf-project/logs/airflow"\n'
-            'resource.labels."environment.name"="test-instancce"\n'
-            'resource.labels.location="europpe-west-3"\n'
-            'resource.labels.project_id="asf-project"\n'
-            'labels.task_id="task_for_testing_file_log_handler"\n'
-            'labels.dag_id="dag_for_testing_file_task_handler"\n'
-            'labels.execution_date="2016-01-01T00:00:00+00:00"',
-            page_token=None,
+        mock_client.return_value.list_log_entries.assert_called_once_with(
+            request=ListLogEntriesRequest(
+                resource_names=["projects/project_id"],
+                filter=(
+                    'resource.type="cloud_composer_environment"\n'
+                    'logName="projects/project_id/logs/airflow"\n'
+                    'resource.labels."environment.name"="test-instancce"\n'
+                    'resource.labels.location="europpe-west-3"\n'
+                    'resource.labels.project_id="project_id"\n'
+                    'labels.task_id="task_for_testing_file_log_handler"\n'
+                    'labels.dag_id="dag_for_testing_file_task_handler"\n'
+                    'labels.execution_date="2016-01-01T00:00:00+00:00"'
+                ),
+                order_by='timestamp asc',
+                page_size=1000,
+                page_token=None,
+            )
         )
-        assert ['TEXT\nTEXT'] == logs
+        assert [(('default-hostname', 'TEXT\nTEXT'),)] == logs
         assert [{'end_of_log': True}] == metadata
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
@@ -278,10 +344,9 @@ class TestStackdriverLoggingHandlerTask(unittest.TestCase):
         assert mock_client.return_value == client
 
     @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id')
-    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client')
+    @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client')
     def test_should_return_valid_external_url(self, mock_client, mock_get_creds_and_project_id):
         mock_get_creds_and_project_id.return_value = ('creds', 'project_id')
-        mock_client.return_value.project = 'project_id'
 
         stackdriver_task_handler = StackdriverTaskHandler(
             gcp_key_path="KEY_PATH",


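For reference, the call shape used by the updated handler above (a ListLogEntriesRequest passed as a single ``request`` argument) looks roughly like the sketch below. The project name and filter are placeholders, and the client picks up application default credentials:

    from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client
    from google.cloud.logging_v2.types import ListLogEntriesRequest

    client = LoggingServiceV2Client()  # application default credentials
    request = ListLogEntriesRequest(
        resource_names=["projects/my-project"],                # placeholder project
        filter='logName="projects/my-project/logs/airflow"',   # placeholder filter
        order_by="timestamp asc",
        page_size=1000,
    )
    response = client.list_log_entries(request=request)
    for page in response.pages:
        for entry in page.entries:
            if "message" in entry.json_payload:
                print(entry.json_payload["message"])
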
[airflow] 27/41: Support google-cloud-automl >=2.1.0 (#13505)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a3b6e47229da2f7da982d1f6afad6aab394bbd27
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Mon Jan 11 09:39:44 2021 +0100

    Support google-cloud-automl >=2.1.0 (#13505)
    
    (cherry picked from commit a6f999b62e3c9aeb10ab24342674d3670a8ad259)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   1 +
 .../cloud/example_dags/example_automl_tables.py    |   6 +-
 airflow/providers/google/cloud/hooks/automl.py     | 103 +++++++++++----------
 airflow/providers/google/cloud/operators/automl.py |  36 +++----
 setup.py                                           |   2 +-
 tests/providers/google/cloud/hooks/test_automl.py  |  70 +++++++-------
 .../google/cloud/operators/test_automl.py          |  29 ++++--
 7 files changed, 134 insertions(+), 113 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index d80f9e1..800703b 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -29,6 +29,7 @@ Details are covered in the UPDATING.md files for each library, but there are som
 
 | Library name | Previous constraints | Current constraints | |
 | --- | --- | --- | --- |
+| [``google-cloud-automl``](https://pypi.org/project/google-cloud-automl/) | ``>=0.4.0,<2.0.0`` | ``>=2.1.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-automl/blob/master/UPGRADING.md) |
 | [``google-cloud-bigquery-datatransfer``](https://pypi.org/project/google-cloud-bigquery-datatransfer/) | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md) |
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
index 4ff92b3..117bd34 100644
--- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py
+++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
@@ -47,7 +47,7 @@ GCP_AUTOML_LOCATION = os.environ.get("GCP_AUTOML_LOCATION", "us-central1")
 GCP_AUTOML_DATASET_BUCKET = os.environ.get(
     "GCP_AUTOML_DATASET_BUCKET", "gs://cloud-ml-tables-data/bank-marketing.csv"
 )
-TARGET = os.environ.get("GCP_AUTOML_TARGET", "Class")
+TARGET = os.environ.get("GCP_AUTOML_TARGET", "Deposit")
 
 # Example values
 MODEL_ID = "TBL123456"
@@ -76,9 +76,9 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str:
     Using column name returns spec of the column.
     """
     for column in columns_specs:
-        if column["displayName"] == column_name:
+        if column["display_name"] == column_name:
             return extract_object_id(column)
-    return ""
+    raise Exception(f"Unknown target column: {column_name}")
 
 
 # Example DAG to create dataset, train model_id and deploy it.
diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py
index 78ec4fb..75d7037 100644
--- a/airflow/providers/google/cloud/hooks/automl.py
+++ b/airflow/providers/google/cloud/hooks/automl.py
@@ -20,22 +20,23 @@
 from typing import Dict, List, Optional, Sequence, Tuple, Union
 
 from cached_property import cached_property
+from google.api_core.operation import Operation
 from google.api_core.retry import Retry
-from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient
-from google.cloud.automl_v1beta1.types import (
+from google.cloud.automl_v1beta1 import (
+    AutoMlClient,
     BatchPredictInputConfig,
     BatchPredictOutputConfig,
     ColumnSpec,
     Dataset,
     ExamplePayload,
-    FieldMask,
     ImageObjectDetectionModelDeploymentMetadata,
     InputConfig,
     Model,
-    Operation,
+    PredictionServiceClient,
     PredictResponse,
     TableSpec,
 )
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 
@@ -123,9 +124,9 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance
         """
         client = self.get_conn()
-        parent = client.location_path(project_id, location)
+        parent = f"projects/{project_id}/locations/{location}"
         return client.create_model(
-            parent=parent, model=model, retry=retry, timeout=timeout, metadata=metadata
+            request={'parent': parent, 'model': model}, retry=retry, timeout=timeout, metadata=metadata or ()
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -176,15 +177,17 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance
         """
         client = self.prediction_client
-        name = client.model_path(project=project_id, location=location, model=model_id)
+        name = f"projects/{project_id}/locations/{location}/models/{model_id}"
         result = client.batch_predict(
-            name=name,
-            input_config=input_config,
-            output_config=output_config,
-            params=params,
+            request={
+                'name': name,
+                'input_config': input_config,
+                'output_config': output_config,
+                'params': params,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -229,14 +232,12 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types.PredictResponse` instance
         """
         client = self.prediction_client
-        name = client.model_path(project=project_id, location=location, model=model_id)
+        name = f"projects/{project_id}/locations/{location}/models/{model_id}"
         result = client.predict(
-            name=name,
-            payload=payload,
-            params=params,
+            request={'name': name, 'payload': payload, 'params': params},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -273,13 +274,12 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types.Dataset` instance.
         """
         client = self.get_conn()
-        parent = client.location_path(project=project_id, location=location)
+        parent = f"projects/{project_id}/locations/{location}"
         result = client.create_dataset(
-            parent=parent,
-            dataset=dataset,
+            request={'parent': parent, 'dataset': dataset},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -319,13 +319,12 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance
         """
         client = self.get_conn()
-        name = client.dataset_path(project=project_id, location=location, dataset=dataset_id)
+        name = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}"
         result = client.import_data(
-            name=name,
-            input_config=input_config,
+            request={'name': name, 'input_config': input_config},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -385,13 +384,10 @@ class CloudAutoMLHook(GoogleBaseHook):
             table_spec=table_spec_id,
         )
         result = client.list_column_specs(
-            parent=parent,
-            field_mask=field_mask,
-            filter_=filter_,
-            page_size=page_size,
+            request={'parent': parent, 'field_mask': field_mask, 'filter': filter_, 'page_size': page_size},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -427,8 +423,10 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types.Model` instance.
         """
         client = self.get_conn()
-        name = client.model_path(project=project_id, location=location, model=model_id)
-        result = client.get_model(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/locations/{location}/models/{model_id}"
+        result = client.get_model(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         return result
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -463,8 +461,10 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance.
         """
         client = self.get_conn()
-        name = client.model_path(project=project_id, location=location, model=model_id)
-        result = client.delete_model(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/locations/{location}/models/{model_id}"
+        result = client.delete_model(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         return result
 
     def update_dataset(
@@ -497,11 +497,10 @@ class CloudAutoMLHook(GoogleBaseHook):
         """
         client = self.get_conn()
         result = client.update_dataset(
-            dataset=dataset,
-            update_mask=update_mask,
+            request={'dataset': dataset, 'update_mask': update_mask},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -547,13 +546,15 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance.
         """
         client = self.get_conn()
-        name = client.model_path(project=project_id, location=location, model=model_id)
+        name = f"projects/{project_id}/locations/{location}/models/{model_id}"
         result = client.deploy_model(
-            name=name,
+            request={
+                'name': name,
+                'image_object_detection_model_deployment_metadata': image_detection_metadata,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
-            image_object_detection_model_deployment_metadata=image_detection_metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -601,14 +602,12 @@ class CloudAutoMLHook(GoogleBaseHook):
             of the response through the `options` parameter.
         """
         client = self.get_conn()
-        parent = client.dataset_path(project=project_id, location=location, dataset=dataset_id)
+        parent = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}"
         result = client.list_table_specs(
-            parent=parent,
-            filter_=filter_,
-            page_size=page_size,
+            request={'parent': parent, 'filter': filter_, 'page_size': page_size},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return result
 
@@ -644,8 +643,10 @@ class CloudAutoMLHook(GoogleBaseHook):
             of the response through the `options` parameter.
         """
         client = self.get_conn()
-        parent = client.location_path(project=project_id, location=location)
-        result = client.list_datasets(parent=parent, retry=retry, timeout=timeout, metadata=metadata)
+        parent = f"projects/{project_id}/locations/{location}"
+        result = client.list_datasets(
+            request={'parent': parent}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         return result
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -680,6 +681,8 @@ class CloudAutoMLHook(GoogleBaseHook):
         :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance
         """
         client = self.get_conn()
-        name = client.dataset_path(project=project_id, location=location, dataset=dataset_id)
-        result = client.delete_dataset(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}"
+        result = client.delete_dataset(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         return result
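
The pattern above repeats throughout the hook: with google-cloud-automl >= 2.1.0 the resource path helpers are gone (plain f-strings are used instead) and the call arguments are wrapped in a single ``request`` dict. A rough, stand-alone sketch of that convention, with placeholder project, location and dataset values that are not from this repository:

    from google.cloud.automl_v1beta1 import AutoMlClient

    project_id, location = "my-project", "us-central1"      # placeholder values
    client = AutoMlClient()                                  # application default credentials
    parent = f"projects/{project_id}/locations/{location}"  # replaces client.location_path(...)

    dataset = {"display_name": "example_dataset", "tables_dataset_metadata": {}}
    result = client.create_dataset(request={"parent": parent, "dataset": dataset})
    print(result.name)
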
diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py
index a1823cd..cdf79b0 100644
--- a/airflow/providers/google/cloud/operators/automl.py
+++ b/airflow/providers/google/cloud/operators/automl.py
@@ -22,7 +22,14 @@ import ast
 from typing import Dict, List, Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry
-from google.protobuf.json_format import MessageToDict
+from google.cloud.automl_v1beta1 import (
+    BatchPredictResult,
+    ColumnSpec,
+    Dataset,
+    Model,
+    PredictResponse,
+    TableSpec,
+)
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
@@ -113,7 +120,7 @@ class AutoMLTrainModelOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(operation.result())
+        result = Model.to_dict(operation.result())
         model_id = hook.extract_object_id(result)
         self.log.info("Model created: %s", model_id)
 
@@ -212,7 +219,7 @@ class AutoMLPredictOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return PredictResponse.to_dict(result)
 
 
 class AutoMLBatchPredictOperator(BaseOperator):
@@ -324,7 +331,7 @@ class AutoMLBatchPredictOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(operation.result())
+        result = BatchPredictResult.to_dict(operation.result())
         self.log.info("Batch prediction ready.")
         return result
 
@@ -414,7 +421,7 @@ class AutoMLCreateDatasetOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(result)
+        result = Dataset.to_dict(result)
         dataset_id = hook.extract_object_id(result)
         self.log.info("Creating completed. Dataset id: %s", dataset_id)
 
@@ -513,9 +520,8 @@ class AutoMLImportDataOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(operation.result())
+        operation.result()
         self.log.info("Import completed")
-        return result
 
 
 class AutoMLTablesListColumnSpecsOperator(BaseOperator):
@@ -627,7 +633,7 @@ class AutoMLTablesListColumnSpecsOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = [MessageToDict(spec) for spec in page_iterator]
+        result = [ColumnSpec.to_dict(spec) for spec in page_iterator]
         self.log.info("Columns specs obtained.")
 
         return result
@@ -718,7 +724,7 @@ class AutoMLTablesUpdateDatasetOperator(BaseOperator):
             metadata=self.metadata,
         )
         self.log.info("Dataset updated.")
-        return MessageToDict(result)
+        return Dataset.to_dict(result)
 
 
 class AutoMLGetModelOperator(BaseOperator):
@@ -804,7 +810,7 @@ class AutoMLGetModelOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return Model.to_dict(result)
 
 
 class AutoMLDeleteModelOperator(BaseOperator):
@@ -890,8 +896,7 @@ class AutoMLDeleteModelOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(operation.result())
-        return result
+        operation.result()
 
 
 class AutoMLDeployModelOperator(BaseOperator):
@@ -991,9 +996,8 @@ class AutoMLDeployModelOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(operation.result())
+        operation.result()
         self.log.info("Model deployed.")
-        return result
 
 
 class AutoMLTablesListTableSpecsOperator(BaseOperator):
@@ -1092,7 +1096,7 @@ class AutoMLTablesListTableSpecsOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = [MessageToDict(spec) for spec in page_iterator]
+        result = [TableSpec.to_dict(spec) for spec in page_iterator]
         self.log.info(result)
         self.log.info("Table specs obtained.")
         return result
@@ -1173,7 +1177,7 @@ class AutoMLListDatasetOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = [MessageToDict(dataset) for dataset in page_iterator]
+        result = [Dataset.to_dict(dataset) for dataset in page_iterator]
         self.log.info("Datasets obtained.")
 
         self.xcom_push(
diff --git a/setup.py b/setup.py
index 5314814..ff9e65d 100644
--- a/setup.py
+++ b/setup.py
@@ -283,7 +283,7 @@ google = [
     'google-api-python-client>=1.6.0,<2.0.0',
     'google-auth>=1.0.0,<2.0.0',
     'google-auth-httplib2>=0.0.1',
-    'google-cloud-automl>=0.4.0,<2.0.0',
+    'google-cloud-automl>=2.1.0,<3.0.0',
     'google-cloud-bigquery-datatransfer>=3.0.0,<4.0.0',
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/tests/providers/google/cloud/hooks/test_automl.py
index 898001c..c9de712 100644
--- a/tests/providers/google/cloud/hooks/test_automl.py
+++ b/tests/providers/google/cloud/hooks/test_automl.py
@@ -19,7 +19,7 @@
 import unittest
 from unittest import mock
 
-from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient
+from google.cloud.automl_v1beta1 import AutoMlClient
 
 from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
 from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
@@ -38,9 +38,9 @@ MODEL = {
     "tables_model_metadata": {"train_budget_milli_node_hours": 1000},
 }
 
-LOCATION_PATH = AutoMlClient.location_path(GCP_PROJECT_ID, GCP_LOCATION)
-MODEL_PATH = PredictionServiceClient.model_path(GCP_PROJECT_ID, GCP_LOCATION, MODEL_ID)
-DATASET_PATH = AutoMlClient.dataset_path(GCP_PROJECT_ID, GCP_LOCATION, DATASET_ID)
+LOCATION_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}"
+MODEL_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/models/{MODEL_ID}"
+DATASET_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/datasets/{DATASET_ID}"
 
 INPUT_CONFIG = {"input": "value"}
 OUTPUT_CONFIG = {"output": "value"}
@@ -81,7 +81,7 @@ class TestAuoMLHook(unittest.TestCase):
         self.hook.create_model(model=MODEL, location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
         mock_create_model.assert_called_once_with(
-            parent=LOCATION_PATH, model=MODEL, retry=None, timeout=None, metadata=None
+            request=dict(parent=LOCATION_PATH, model=MODEL), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient.batch_predict")
@@ -95,13 +95,12 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_batch_predict.assert_called_once_with(
-            name=MODEL_PATH,
-            input_config=INPUT_CONFIG,
-            output_config=OUTPUT_CONFIG,
-            params=None,
+            request=dict(
+                name=MODEL_PATH, input_config=INPUT_CONFIG, output_config=OUTPUT_CONFIG, params=None
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient.predict")
@@ -114,12 +113,10 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_predict.assert_called_once_with(
-            name=MODEL_PATH,
-            payload=PAYLOAD,
-            params=None,
+            request=dict(name=MODEL_PATH, payload=PAYLOAD, params=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.create_dataset")
@@ -127,11 +124,10 @@ class TestAuoMLHook(unittest.TestCase):
         self.hook.create_dataset(dataset=DATASET, location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
         mock_create_dataset.assert_called_once_with(
-            parent=LOCATION_PATH,
-            dataset=DATASET,
+            request=dict(parent=LOCATION_PATH, dataset=DATASET),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.import_data")
@@ -144,11 +140,10 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_import_data.assert_called_once_with(
-            name=DATASET_PATH,
-            input_config=INPUT_CONFIG,
+            request=dict(name=DATASET_PATH, input_config=INPUT_CONFIG),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_column_specs")
@@ -169,26 +164,27 @@ class TestAuoMLHook(unittest.TestCase):
 
         parent = AutoMlClient.table_spec_path(GCP_PROJECT_ID, GCP_LOCATION, DATASET_ID, table_spec)
         mock_list_column_specs.assert_called_once_with(
-            parent=parent,
-            field_mask=MASK,
-            filter_=filter_,
-            page_size=page_size,
+            request=dict(parent=parent, field_mask=MASK, filter=filter_, page_size=page_size),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.get_model")
     def test_get_model(self, mock_get_model):
         self.hook.get_model(model_id=MODEL_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
-        mock_get_model.assert_called_once_with(name=MODEL_PATH, retry=None, timeout=None, metadata=None)
+        mock_get_model.assert_called_once_with(
+            request=dict(name=MODEL_PATH), retry=None, timeout=None, metadata=()
+        )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.delete_model")
     def test_delete_model(self, mock_delete_model):
         self.hook.delete_model(model_id=MODEL_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
-        mock_delete_model.assert_called_once_with(name=MODEL_PATH, retry=None, timeout=None, metadata=None)
+        mock_delete_model.assert_called_once_with(
+            request=dict(name=MODEL_PATH), retry=None, timeout=None, metadata=()
+        )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.update_dataset")
     def test_update_dataset(self, mock_update_dataset):
@@ -198,7 +194,7 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_update_dataset.assert_called_once_with(
-            dataset=DATASET, update_mask=MASK, retry=None, timeout=None, metadata=None
+            request=dict(dataset=DATASET, update_mask=MASK), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.deploy_model")
@@ -213,11 +209,13 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_deploy_model.assert_called_once_with(
-            name=MODEL_PATH,
+            request=dict(
+                name=MODEL_PATH,
+                image_object_detection_model_deployment_metadata=image_detection_metadata,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
-            image_object_detection_model_deployment_metadata=image_detection_metadata,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_table_specs")
@@ -234,12 +232,10 @@ class TestAuoMLHook(unittest.TestCase):
         )
 
         mock_list_table_specs.assert_called_once_with(
-            parent=DATASET_PATH,
-            filter_=filter_,
-            page_size=page_size,
+            request=dict(parent=DATASET_PATH, filter=filter_, page_size=page_size),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_datasets")
@@ -247,7 +243,7 @@ class TestAuoMLHook(unittest.TestCase):
         self.hook.list_datasets(location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
         mock_list_datasets.assert_called_once_with(
-            parent=LOCATION_PATH, retry=None, timeout=None, metadata=None
+            request=dict(parent=LOCATION_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.delete_dataset")
@@ -255,5 +251,5 @@ class TestAuoMLHook(unittest.TestCase):
         self.hook.delete_dataset(dataset_id=DATASET_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID)
 
         mock_delete_dataset.assert_called_once_with(
-            name=DATASET_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=DATASET_PATH), retry=None, timeout=None, metadata=()
         )
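
The hook-level hunks above reduce to two idioms introduced by google-cloud-automl>=2.1.0: each RPC takes a single request mapping (with metadata defaulting to an empty tuple rather than None), and responses are proto-plus messages that serialize via the class method to_dict() instead of MessageToDict. A minimal sketch of the new calling convention follows; the project, location and dataset values are placeholders, and it assumes google-cloud-automl>=2.1.0 plus Application Default Credentials are available.

    from google.cloud.automl_v1beta1 import AutoMlClient, Dataset

    client = AutoMlClient()

    # Resource names are built as plain strings here, mirroring the f-strings
    # the patch uses in place of the old *_path() helpers.
    parent = "projects/my-project/locations/us-central1"

    # All RPC-specific arguments travel inside one `request` mapping.
    dataset = client.create_dataset(
        request={
            "parent": parent,
            # Placeholder dataset spec, only meant to show the call shape.
            "dataset": {"display_name": "my_dataset", "tables_dataset_metadata": {}},
        },
        retry=None,
        timeout=None,
        metadata=(),  # an empty tuple, not None, now means "no extra metadata"
    )

    # Proto-plus messages convert to plain dicts through the class method.
    print(Dataset.to_dict(dataset))
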
diff --git a/tests/providers/google/cloud/operators/test_automl.py b/tests/providers/google/cloud/operators/test_automl.py
index 903600b..4c80703 100644
--- a/tests/providers/google/cloud/operators/test_automl.py
+++ b/tests/providers/google/cloud/operators/test_automl.py
@@ -20,8 +20,9 @@ import copy
 import unittest
 from unittest import mock
 
-from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient
+from google.cloud.automl_v1beta1 import BatchPredictResult, Dataset, Model, PredictResponse
 
+from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
 from airflow.providers.google.cloud.operators.automl import (
     AutoMLBatchPredictOperator,
     AutoMLCreateDatasetOperator,
@@ -43,7 +44,7 @@ TASK_ID = "test-automl-hook"
 GCP_PROJECT_ID = "test-project"
 GCP_LOCATION = "test-location"
 MODEL_NAME = "test_model"
-MODEL_ID = "projects/198907790164/locations/us-central1/models/TBL9195602771183665152"
+MODEL_ID = "TBL9195602771183665152"
 DATASET_ID = "TBL123456789"
 MODEL = {
     "display_name": MODEL_NAME,
@@ -51,8 +52,9 @@ MODEL = {
     "tables_model_metadata": {"train_budget_milli_node_hours": 1000},
 }
 
-LOCATION_PATH = AutoMlClient.location_path(GCP_PROJECT_ID, GCP_LOCATION)
-MODEL_PATH = PredictionServiceClient.model_path(GCP_PROJECT_ID, GCP_LOCATION, MODEL_ID)
+LOCATION_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}"
+MODEL_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/models/{MODEL_ID}"
+DATASET_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/datasets/{DATASET_ID}"
 
 INPUT_CONFIG = {"input": "value"}
 OUTPUT_CONFIG = {"output": "value"}
@@ -60,12 +62,15 @@ PAYLOAD = {"test": "payload"}
 DATASET = {"dataset_id": "data"}
 MASK = {"field": "mask"}
 
+extract_object_id = CloudAutoMLHook.extract_object_id
+
 
 class TestAutoMLTrainModelOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.AutoMLTrainModelOperator.xcom_push")
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook, mock_xcom):
-        mock_hook.return_value.extract_object_id.return_value = MODEL_ID
+        mock_hook.return_value.create_model.return_value.result.return_value = Model(name=MODEL_PATH)
+        mock_hook.return_value.extract_object_id = extract_object_id
         op = AutoMLTrainModelOperator(
             model=MODEL,
             location=GCP_LOCATION,
@@ -87,6 +92,9 @@ class TestAutoMLTrainModelOperator(unittest.TestCase):
 class TestAutoMLBatchPredictOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook):
+        mock_hook.return_value.batch_predict.return_value.result.return_value = BatchPredictResult()
+        mock_hook.return_value.extract_object_id = extract_object_id
+
         op = AutoMLBatchPredictOperator(
             model_id=MODEL_ID,
             location=GCP_LOCATION,
@@ -113,6 +121,8 @@ class TestAutoMLBatchPredictOperator(unittest.TestCase):
 class TestAutoMLPredictOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook):
+        mock_hook.return_value.predict.return_value = PredictResponse()
+
         op = AutoMLPredictOperator(
             model_id=MODEL_ID,
             location=GCP_LOCATION,
@@ -137,7 +147,9 @@ class TestAutoMLCreateImportOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.AutoMLCreateDatasetOperator.xcom_push")
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook, mock_xcom):
-        mock_hook.return_value.extract_object_id.return_value = DATASET_ID
+        mock_hook.return_value.create_dataset.return_value = Dataset(name=DATASET_PATH)
+        mock_hook.return_value.extract_object_id = extract_object_id
+
         op = AutoMLCreateDatasetOperator(
             dataset=DATASET,
             location=GCP_LOCATION,
@@ -191,6 +203,8 @@ class TestAutoMLListColumnsSpecsOperator(unittest.TestCase):
 class TestAutoMLUpdateDatasetOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook):
+        mock_hook.return_value.update_dataset.return_value = Dataset(name=DATASET_PATH)
+
         dataset = copy.deepcopy(DATASET)
         dataset["name"] = DATASET_ID
 
@@ -213,6 +227,9 @@ class TestAutoMLUpdateDatasetOperator(unittest.TestCase):
 class TestAutoMLGetModelOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook")
     def test_execute(self, mock_hook):
+        mock_hook.return_value.get_model.return_value = Model(name=MODEL_PATH)
+        mock_hook.return_value.extract_object_id = extract_object_id
+
         op = AutoMLGetModelOperator(
             model_id=MODEL_ID,
             location=GCP_LOCATION,

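A note on the operator tests just above: because the operators now serialize results with Model.to_dict() / Dataset.to_dict(), the mocked hook has to return real proto-plus messages (for example Model(name=MODEL_PATH)) rather than bare MagicMock objects, and extract_object_id is rebound to the real static method so the object ID can still be parsed out of the resource name. A condensed, self-contained sketch of that arrangement, using hypothetical helper names outside the Airflow test harness:

    from unittest import mock

    from google.cloud.automl_v1beta1 import Model

    MODEL_PATH = "projects/test-project/locations/test-location/models/TBL123"

    def run_train_model_logic(hook):
        # Stand-in for the relevant part of AutoMLTrainModelOperator.execute():
        # wait for the long-running operation, convert the result, extract the ID.
        operation = hook.create_model(
            model={}, location="test-location", project_id="test-project"
        )
        result = Model.to_dict(operation.result())
        return hook.extract_object_id(result)

    hook = mock.MagicMock()
    # A real proto-plus Model is required here; Model.to_dict() would reject a MagicMock.
    hook.create_model.return_value.result.return_value = Model(name=MODEL_PATH)
    # Simplified stand-in for CloudAutoMLHook.extract_object_id.
    hook.extract_object_id = lambda obj: obj["name"].rpartition("/")[-1]

    assert run_train_model_logic(hook) == "TBL123"
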

[airflow] 26/41: Support google-cloud-datacatalog>=3.0.0 (#13534)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 02cb5e1af6c4d1b2823729d3f2801fd9d05bdf43
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Mon Jan 11 09:39:19 2021 +0100

    Support google-cloud-datacatalog>=3.0.0 (#13534)
    
    (cherry picked from commit 947dbb73bba736eb146f33117545a18fc2fd3c09)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   2 +-
 .../cloud/example_dags/example_datacatalog.py      |  10 +-
 .../providers/google/cloud/hooks/datacatalog.py    | 220 ++++++++++++-------
 .../google/cloud/operators/datacatalog.py          |  47 ++--
 setup.py                                           |   2 +-
 .../google/cloud/hooks/test_datacatalog.py         | 237 +++++++++++++--------
 .../google/cloud/operators/test_datacatalog.py     |  49 +++--
 7 files changed, 357 insertions(+), 210 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index eca05df..d80f9e1 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -30,7 +30,7 @@ Details are covered in the UPDATING.md files for each library, but there are som
 | Library name | Previous constraints | Current constraints | |
 | --- | --- | --- | --- |
 | [``google-cloud-bigquery-datatransfer``](https://pypi.org/project/google-cloud-bigquery-datatransfer/) | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md) |
-| [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=1.0.0,<2.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
+| [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
 | [``google-cloud-kms``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
diff --git a/airflow/providers/google/cloud/example_dags/example_datacatalog.py b/airflow/providers/google/cloud/example_dags/example_datacatalog.py
index c8597a6..cc4b73a 100644
--- a/airflow/providers/google/cloud/example_dags/example_datacatalog.py
+++ b/airflow/providers/google/cloud/example_dags/example_datacatalog.py
@@ -19,7 +19,7 @@
 """
 Example Airflow DAG that interacts with Google Data Catalog service
 """
-from google.cloud.datacatalog_v1beta1.proto.tags_pb2 import FieldType, TagField, TagTemplateField
+from google.cloud.datacatalog_v1beta1 import FieldType, TagField, TagTemplateField
 
 from airflow import models
 from airflow.operators.bash_operator import BashOperator
@@ -91,7 +91,7 @@ with models.DAG("example_gcp_datacatalog", start_date=days_ago(1), schedule_inte
         entry_id=ENTRY_ID,
         entry={
             "display_name": "Wizard",
-            "type": "FILESET",
+            "type_": "FILESET",
             "gcs_fileset_spec": {"file_patterns": ["gs://test-datacatalog/**"]},
         },
     )
@@ -144,7 +144,7 @@ with models.DAG("example_gcp_datacatalog", start_date=days_ago(1), schedule_inte
             "display_name": "Awesome Tag Template",
             "fields": {
                 FIELD_NAME_1: TagTemplateField(
-                    display_name="first-field", type=FieldType(primitive_type="STRING")
+                    display_name="first-field", type_=dict(primitive_type="STRING")
                 )
             },
         },
@@ -172,7 +172,7 @@ with models.DAG("example_gcp_datacatalog", start_date=days_ago(1), schedule_inte
         tag_template=TEMPLATE_ID,
         tag_template_field_id=FIELD_NAME_2,
         tag_template_field=TagTemplateField(
-            display_name="second-field", type=FieldType(primitive_type="STRING")
+            display_name="second-field", type_=FieldType(primitive_type="STRING")
         ),
     )
     # [END howto_operator_gcp_datacatalog_create_tag_template_field]
@@ -305,7 +305,7 @@ with models.DAG("example_gcp_datacatalog", start_date=days_ago(1), schedule_inte
     # [START howto_operator_gcp_datacatalog_lookup_entry_result]
     lookup_entry_result = BashOperator(
         task_id="lookup_entry_result",
-        bash_command="echo \"{{ task_instance.xcom_pull('lookup_entry')['displayName'] }}\"",
+        bash_command="echo \"{{ task_instance.xcom_pull('lookup_entry')['display_name'] }}\"",
     )
     # [END howto_operator_gcp_datacatalog_lookup_entry_result]
 
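A practical consequence of google-cloud-datacatalog 3.x that this example DAG illustrates: proto fields whose names collide with Python builtins gained a trailing underscore (type becomes type_), message classes are imported from the package root rather than from proto.tags_pb2, and serialized results keep snake_case keys, which is why the Jinja expression above now reads ['display_name'] instead of ['displayName']. A small sketch of building the same objects directly, assuming google-cloud-datacatalog>=3.0.0 is installed; the values are placeholders:

    from google.cloud.datacatalog_v1beta1 import Entry, FieldType, TagTemplateField

    # `type` clashes with the Python builtin, so the generated field is `type_`.
    entry = Entry(
        display_name="Wizard",
        type_="FILESET",
        gcs_fileset_spec={"file_patterns": ["gs://test-datacatalog/**"]},
    )

    field = TagTemplateField(
        display_name="first-field",
        type_=FieldType(primitive_type="STRING"),
    )

    # to_dict() keeps the proto (snake_case) field names by default.
    print(Entry.to_dict(entry)["display_name"])
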
diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/airflow/providers/google/cloud/hooks/datacatalog.py
index 70b488d..0d6cc75 100644
--- a/airflow/providers/google/cloud/hooks/datacatalog.py
+++ b/airflow/providers/google/cloud/hooks/datacatalog.py
@@ -18,16 +18,18 @@
 from typing import Dict, Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry
-from google.cloud.datacatalog_v1beta1 import DataCatalogClient
-from google.cloud.datacatalog_v1beta1.types import (
+from google.cloud import datacatalog
+from google.cloud.datacatalog_v1beta1 import (
+    CreateTagRequest,
+    DataCatalogClient,
     Entry,
     EntryGroup,
-    FieldMask,
     SearchCatalogRequest,
     Tag,
     TagTemplate,
     TagTemplateField,
 )
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow import AirflowException
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
@@ -115,10 +117,13 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = DataCatalogClient.entry_group_path(project_id, location, entry_group)
+        parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
         self.log.info('Creating a new entry: parent=%s', parent)
         result = client.create_entry(
-            parent=parent, entry_id=entry_id, entry=entry, retry=retry, timeout=timeout, metadata=metadata
+            request={'parent': parent, 'entry_id': entry_id, 'entry': entry},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         self.log.info('Created a entry: name=%s', result.name)
         return result
@@ -161,16 +166,14 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = DataCatalogClient.location_path(project_id, location)
+        parent = f"projects/{project_id}/locations/{location}"
         self.log.info('Creating a new entry group: parent=%s', parent)
 
         result = client.create_entry_group(
-            parent=parent,
-            entry_group_id=entry_group_id,
-            entry_group=entry_group,
+            request={'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         self.log.info('Created a entry group: name=%s', result.name)
 
@@ -218,15 +221,34 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if template_id:
-            template_path = DataCatalogClient.tag_template_path(project_id, location, template_id)
+            template_path = f"projects/{project_id}/locations/{location}/tagTemplates/{template_id}"
             if isinstance(tag, Tag):
                 tag.template = template_path
             else:
                 tag["template"] = template_path
-        parent = DataCatalogClient.entry_path(project_id, location, entry_group, entry)
+        parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
 
         self.log.info('Creating a new tag: parent=%s', parent)
-        result = client.create_tag(parent=parent, tag=tag, retry=retry, timeout=timeout, metadata=metadata)
+        # HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a
+        # primitive type, so we need to convert it manually.
+        # See: https://github.com/googleapis/python-datacatalog/issues/84
+        if isinstance(tag, dict):
+            tag = Tag(
+                name=tag.get('name'),
+                template=tag.get('template'),
+                template_display_name=tag.get('template_display_name'),
+                column=tag.get('column'),
+                fields={
+                    k: datacatalog.TagField(**v) if isinstance(v, dict) else v
+                    for k, v in tag.get("fields", {}).items()
+                },
+            )
+        request = CreateTagRequest(
+            parent=parent,
+            tag=tag,
+        )
+
+        result = client.create_tag(request=request, retry=retry, timeout=timeout, metadata=metadata or ())
         self.log.info('Created a tag: name=%s', result.name)
 
         return result
@@ -267,17 +289,30 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = DataCatalogClient.location_path(project_id, location)
+        parent = f"projects/{project_id}/locations/{location}"
 
         self.log.info('Creating a new tag template: parent=%s', parent)
+        # HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a
+        # primitive type, so we need to convert it manually.
+        # See: https://github.com/googleapis/python-datacatalog/issues/84
+        if isinstance(tag_template, dict):
+            tag_template = datacatalog.TagTemplate(
+                name=tag_template.get("name"),
+                display_name=tag_template.get("display_name"),
+                fields={
+                    k: datacatalog.TagTemplateField(**v) if isinstance(v, dict) else v
+                    for k, v in tag_template.get("fields", {}).items()
+                },
+            )
 
+        request = datacatalog.CreateTagTemplateRequest(
+            parent=parent, tag_template_id=tag_template_id, tag_template=tag_template
+        )
         result = client.create_tag_template(
-            parent=parent,
-            tag_template_id=tag_template_id,
-            tag_template=tag_template,
+            request=request,
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         self.log.info('Created a tag template: name=%s', result.name)
 
@@ -325,17 +360,19 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = DataCatalogClient.tag_template_path(project_id, location, tag_template)
+        parent = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
 
         self.log.info('Creating a new tag template field: parent=%s', parent)
 
         result = client.create_tag_template_field(
-            parent=parent,
-            tag_template_field_id=tag_template_field_id,
-            tag_template_field=tag_template_field,
+            request={
+                'parent': parent,
+                'tag_template_field_id': tag_template_field_id,
+                'tag_template_field': tag_template_field,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
         self.log.info('Created a tag template field: name=%s', result.name)
@@ -375,9 +412,9 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.entry_path(project_id, location, entry_group, entry)
+        name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
         self.log.info('Deleting a entry: name=%s', name)
-        client.delete_entry(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        client.delete_entry(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ())
         self.log.info('Deleted a entry: name=%s', name)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -412,10 +449,12 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.entry_group_path(project_id, location, entry_group)
+        name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
 
         self.log.info('Deleting a entry group: name=%s', name)
-        client.delete_entry_group(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        client.delete_entry_group(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         self.log.info('Deleted a entry group: name=%s', name)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -454,10 +493,12 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.tag_path(project_id, location, entry_group, entry, tag)
+        name = (
+            f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}"
+        )
 
         self.log.info('Deleting a tag: name=%s', name)
-        client.delete_tag(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        client.delete_tag(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ())
         self.log.info('Deleted a tag: name=%s', name)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -495,10 +536,12 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.tag_template_path(project_id, location, tag_template)
+        name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
 
         self.log.info('Deleting a tag template: name=%s', name)
-        client.delete_tag_template(name=name, force=force, retry=retry, timeout=timeout, metadata=metadata)
+        client.delete_tag_template(
+            request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         self.log.info('Deleted a tag template: name=%s', name)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -537,11 +580,11 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.tag_template_field_path(project_id, location, tag_template, field)
+        name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"
 
         self.log.info('Deleting a tag template field: name=%s', name)
         client.delete_tag_template_field(
-            name=name, force=force, retry=retry, timeout=timeout, metadata=metadata
+            request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or ()
         )
         self.log.info('Deleted a tag template field: name=%s', name)
 
@@ -578,10 +621,12 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.entry_path(project_id, location, entry_group, entry)
+        name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
 
         self.log.info('Getting a entry: name=%s', name)
-        result = client.get_entry(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        result = client.get_entry(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
         self.log.info('Received a entry: name=%s', result.name)
 
         return result
@@ -607,8 +652,8 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :param read_mask: The fields to return. If not set or empty, all fields are returned.
 
             If a dict is provided, it must be of the same form as the protobuf message
-            :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-        :type read_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+            :class:`~google.protobuf.field_mask_pb2.FieldMask`
+        :type read_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
         :param project_id: The ID of the Google Cloud project that owns the entry group.
             If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
         :type project_id: str
@@ -622,12 +667,15 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.entry_group_path(project_id, location, entry_group)
+        name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}"
 
         self.log.info('Getting a entry group: name=%s', name)
 
         result = client.get_entry_group(
-            name=name, read_mask=read_mask, retry=retry, timeout=timeout, metadata=metadata
+            request={'name': name, 'read_mask': read_mask},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
 
         self.log.info('Received a entry group: name=%s', result.name)
@@ -664,11 +712,13 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.tag_template_path(project_id, location, tag_template)
+        name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
 
         self.log.info('Getting a tag template: name=%s', name)
 
-        result = client.get_tag_template(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        result = client.get_tag_template(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
         self.log.info('Received a tag template: name=%s', result.name)
 
@@ -712,12 +762,15 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        parent = DataCatalogClient.entry_path(project_id, location, entry_group, entry)
+        parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
 
         self.log.info('Listing tag on entry: entry_name=%s', parent)
 
         result = client.list_tags(
-            parent=parent, page_size=page_size, retry=retry, timeout=timeout, metadata=metadata
+            request={'parent': parent, 'page_size': page_size},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
 
         self.log.info('Received tags.')
@@ -811,12 +864,18 @@ class CloudDataCatalogHook(GoogleBaseHook):
         if linked_resource:
             self.log.info('Getting entry: linked_resource=%s', linked_resource)
             result = client.lookup_entry(
-                linked_resource=linked_resource, retry=retry, timeout=timeout, metadata=metadata
+                request={'linked_resource': linked_resource},
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata or (),
             )
         else:
             self.log.info('Getting entry: sql_resource=%s', sql_resource)
             result = client.lookup_entry(
-                sql_resource=sql_resource, retry=retry, timeout=timeout, metadata=metadata
+                request={'sql_resource': sql_resource},
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata or (),
             )
         self.log.info('Received entry. name=%s', result.name)
 
@@ -860,18 +919,17 @@ class CloudDataCatalogHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]
         """
         client = self.get_conn()
-        name = DataCatalogClient.tag_template_field_path(project_id, location, tag_template, field)
+        name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"
 
         self.log.info(
             'Renaming field: old_name=%s, new_tag_template_field_id=%s', name, new_tag_template_field_id
         )
 
         result = client.rename_tag_template_field(
-            name=name,
-            new_tag_template_field_id=new_tag_template_field_id,
+            request={'name': name, 'new_tag_template_field_id': new_tag_template_field_id},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
         self.log.info('Renamed tag template field.')
@@ -946,13 +1004,10 @@ class CloudDataCatalogHook(GoogleBaseHook):
             order_by,
         )
         result = client.search_catalog(
-            scope=scope,
-            query=query,
-            page_size=page_size,
-            order_by=order_by,
+            request={'scope': scope, 'query': query, 'page_size': page_size, 'order_by': order_by},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
         self.log.info('Received items.')
@@ -984,8 +1039,8 @@ class CloudDataCatalogHook(GoogleBaseHook):
             updated.
 
             If a dict is provided, it must be of the same form as the protobuf message
-            :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-        :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+            :class:`~google.protobuf.field_mask_pb2.FieldMask`
+        :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
         :param location: Required. The location of the entry to update.
         :type location: str
         :param entry_group: The entry group ID for the entry that is being updated.
@@ -1006,7 +1061,9 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if project_id and location and entry_group and entry_id:
-            full_entry_name = DataCatalogClient.entry_path(project_id, location, entry_group, entry_id)
+            full_entry_name = (
+                f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry_id}"
+            )
             if isinstance(entry, Entry):
                 entry.name = full_entry_name
             elif isinstance(entry, dict):
@@ -1025,7 +1082,10 @@ class CloudDataCatalogHook(GoogleBaseHook):
         if isinstance(entry, dict):
             entry = Entry(**entry)
         result = client.update_entry(
-            entry=entry, update_mask=update_mask, retry=retry, timeout=timeout, metadata=metadata
+            request={'entry': entry, 'update_mask': update_mask},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
 
         self.log.info('Updated entry.')
@@ -1059,7 +1119,7 @@ class CloudDataCatalogHook(GoogleBaseHook):
 
             If a dict is provided, it must be of the same form as the protobuf message
             :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-        :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
         :param location: Required. The location of the tag to rename.
         :type location: str
         :param entry_group: The entry group ID for the tag that is being updated.
@@ -1082,7 +1142,10 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if project_id and location and entry_group and entry and tag_id:
-            full_tag_name = DataCatalogClient.tag_path(project_id, location, entry_group, entry, tag_id)
+            full_tag_name = (
+                f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}"
+                f"/tags/{tag_id}"
+            )
             if isinstance(tag, Tag):
                 tag.name = full_tag_name
             elif isinstance(tag, dict):
@@ -1102,7 +1165,10 @@ class CloudDataCatalogHook(GoogleBaseHook):
         if isinstance(tag, dict):
             tag = Tag(**tag)
         result = client.update_tag(
-            tag=tag, update_mask=update_mask, retry=retry, timeout=timeout, metadata=metadata
+            request={'tag': tag, 'update_mask': update_mask},
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata or (),
         )
         self.log.info('Updated tag.')
 
@@ -1137,8 +1203,8 @@ class CloudDataCatalogHook(GoogleBaseHook):
             If absent or empty, all of the allowed fields above will be updated.
 
             If a dict is provided, it must be of the same form as the protobuf message
-            :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-        :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+            :class:`~google.protobuf.field_mask_pb2.FieldMask`
+        :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
         :param location: Required. The location of the tag template to rename.
         :type location: str
         :param tag_template_id: Optional. The tag template ID for the entry that is being updated.
@@ -1157,8 +1223,8 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if project_id and location and tag_template:
-            full_tag_template_name = DataCatalogClient.tag_template_path(
-                project_id, location, tag_template_id
+            full_tag_template_name = (
+                f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}"
             )
             if isinstance(tag_template, TagTemplate):
                 tag_template.name = full_tag_template_name
@@ -1179,11 +1245,10 @@ class CloudDataCatalogHook(GoogleBaseHook):
         if isinstance(tag_template, dict):
             tag_template = TagTemplate(**tag_template)
         result = client.update_tag_template(
-            tag_template=tag_template,
-            update_mask=update_mask,
+            request={'tag_template': tag_template, 'update_mask': update_mask},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         self.log.info('Updated tag template.')
 
@@ -1222,8 +1287,8 @@ class CloudDataCatalogHook(GoogleBaseHook):
             Therefore, enum values can only be added, existing enum values cannot be deleted nor renamed.
 
             If a dict is provided, it must be of the same form as the protobuf message
-            :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-        :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+            :class:`~google.protobuf.field_mask_pb2.FieldMask`
+        :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
         :param tag_template_field_name: Optional. The name of the tag template field to rename.
         :type tag_template_field_name: str
         :param location: Optional. The location of the tag to rename.
@@ -1246,19 +1311,22 @@ class CloudDataCatalogHook(GoogleBaseHook):
         """
         client = self.get_conn()
         if project_id and location and tag_template and tag_template_field_id:
-            tag_template_field_name = DataCatalogClient.tag_template_field_path(
-                project_id, location, tag_template, tag_template_field_id
+            tag_template_field_name = (
+                f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
+                f"/fields/{tag_template_field_id}"
             )
 
         self.log.info("Updating tag template field: name=%s", tag_template_field_name)
 
         result = client.update_tag_template_field(
-            name=tag_template_field_name,
-            tag_template_field=tag_template_field,
-            update_mask=update_mask,
+            request={
+                'name': tag_template_field_name,
+                'tag_template_field': tag_template_field,
+                'update_mask': update_mask,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         self.log.info('Updated tag template field.')
 
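The recurring pattern in this hook is worth spelling out: every RPC now receives a single request argument (a dict or the generated request class) plus retry/timeout/metadata, with "metadata or ()" guarding against the old None default, and dict inputs whose map values are themselves messages (a tag's or a template's fields) are converted by hand because of the mapping issue referenced in the comments above (googleapis/python-datacatalog#84). A condensed sketch of that conversion and calling convention, assuming google-cloud-datacatalog>=3.0.0 and Application Default Credentials; all resource names below are placeholders:

    from google.cloud.datacatalog_v1beta1 import (
        CreateTagRequest,
        DataCatalogClient,
        Tag,
        TagField,
    )

    client = DataCatalogClient()
    parent = (
        "projects/my-project/locations/us-central1"
        "/entryGroups/my-group/entries/my-entry"
    )

    raw_tag = {
        "template": "projects/my-project/locations/us-central1/tagTemplates/my-template",
        "fields": {"first-field": {"string_value": "example"}},
    }

    # Map values that are messages are not coerced from plain dicts automatically,
    # so build the TagField objects explicitly before constructing the Tag.
    tag = Tag(
        template=raw_tag["template"],
        fields={
            key: TagField(**value) if isinstance(value, dict) else value
            for key, value in raw_tag.get("fields", {}).items()
        },
    )

    result = client.create_tag(
        request=CreateTagRequest(parent=parent, tag=tag),
        retry=None,
        timeout=None,
        metadata=(),
    )
    print(result.name)
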
diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/airflow/providers/google/cloud/operators/datacatalog.py
index 00b2765..4b0da05 100644
--- a/airflow/providers/google/cloud/operators/datacatalog.py
+++ b/airflow/providers/google/cloud/operators/datacatalog.py
@@ -19,17 +19,16 @@ from typing import Dict, Optional, Sequence, Tuple, Union
 
 from google.api_core.exceptions import AlreadyExists, NotFound
 from google.api_core.retry import Retry
-from google.cloud.datacatalog_v1beta1 import DataCatalogClient
+from google.cloud.datacatalog_v1beta1 import DataCatalogClient, SearchCatalogResult
 from google.cloud.datacatalog_v1beta1.types import (
     Entry,
     EntryGroup,
-    FieldMask,
     SearchCatalogRequest,
     Tag,
     TagTemplate,
     TagTemplateField,
 )
-from google.protobuf.json_format import MessageToDict
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook
@@ -153,7 +152,7 @@ class CloudDataCatalogCreateEntryOperator(BaseOperator):
         _, _, entry_id = result.name.rpartition("/")
         self.log.info("Current entry_id ID: %s", entry_id)
         context["task_instance"].xcom_push(key="entry_id", value=entry_id)
-        return MessageToDict(result)
+        return Entry.to_dict(result)
 
 
 class CloudDataCatalogCreateEntryGroupOperator(BaseOperator):
@@ -268,7 +267,7 @@ class CloudDataCatalogCreateEntryGroupOperator(BaseOperator):
         _, _, entry_group_id = result.name.rpartition("/")
         self.log.info("Current entry group ID: %s", entry_group_id)
         context["task_instance"].xcom_push(key="entry_group_id", value=entry_group_id)
-        return MessageToDict(result)
+        return EntryGroup.to_dict(result)
 
 
 class CloudDataCatalogCreateTagOperator(BaseOperator):
@@ -404,7 +403,7 @@ class CloudDataCatalogCreateTagOperator(BaseOperator):
         _, _, tag_id = tag.name.rpartition("/")
         self.log.info("Current Tag ID: %s", tag_id)
         context["task_instance"].xcom_push(key="tag_id", value=tag_id)
-        return MessageToDict(tag)
+        return Tag.to_dict(tag)
 
 
 class CloudDataCatalogCreateTagTemplateOperator(BaseOperator):
@@ -516,7 +515,7 @@ class CloudDataCatalogCreateTagTemplateOperator(BaseOperator):
         _, _, tag_template = result.name.rpartition("/")
         self.log.info("Current Tag ID: %s", tag_template)
         context["task_instance"].xcom_push(key="tag_template_id", value=tag_template)
-        return MessageToDict(result)
+        return TagTemplate.to_dict(result)
 
 
 class CloudDataCatalogCreateTagTemplateFieldOperator(BaseOperator):
@@ -638,7 +637,7 @@ class CloudDataCatalogCreateTagTemplateFieldOperator(BaseOperator):
 
         self.log.info("Current Tag ID: %s", self.tag_template_field_id)
         context["task_instance"].xcom_push(key="tag_template_field_id", value=self.tag_template_field_id)
-        return MessageToDict(result)
+        return TagTemplateField.to_dict(result)
 
 
 class CloudDataCatalogDeleteEntryOperator(BaseOperator):
@@ -1216,7 +1215,7 @@ class CloudDataCatalogGetEntryOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return Entry.to_dict(result)
 
 
 class CloudDataCatalogGetEntryGroupOperator(BaseOperator):
@@ -1234,8 +1233,8 @@ class CloudDataCatalogGetEntryGroupOperator(BaseOperator):
     :param read_mask: The fields to return. If not set or empty, all fields are returned.
 
         If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-    :type read_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type read_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param project_id: The ID of the Google Cloud project that owns the entry group.
         If set to ``None`` or missing, the default project_id from the Google Cloud connection is used.
     :type project_id: Optional[str]
@@ -1312,7 +1311,7 @@ class CloudDataCatalogGetEntryGroupOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return EntryGroup.to_dict(result)
 
 
 class CloudDataCatalogGetTagTemplateOperator(BaseOperator):
@@ -1399,7 +1398,7 @@ class CloudDataCatalogGetTagTemplateOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return TagTemplate.to_dict(result)
 
 
 class CloudDataCatalogListTagsOperator(BaseOperator):
@@ -1501,7 +1500,7 @@ class CloudDataCatalogListTagsOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return [MessageToDict(item) for item in result]
+        return [Tag.to_dict(item) for item in result]
 
 
 class CloudDataCatalogLookupEntryOperator(BaseOperator):
@@ -1589,7 +1588,7 @@ class CloudDataCatalogLookupEntryOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(result)
+        return Entry.to_dict(result)
 
 
 class CloudDataCatalogRenameTagTemplateFieldOperator(BaseOperator):
@@ -1809,7 +1808,7 @@ class CloudDataCatalogSearchCatalogOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return [MessageToDict(item) for item in result]
+        return [SearchCatalogResult.to_dict(item) for item in result]
 
 
 class CloudDataCatalogUpdateEntryOperator(BaseOperator):
@@ -1829,8 +1828,8 @@ class CloudDataCatalogUpdateEntryOperator(BaseOperator):
         updated.
 
         If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-    :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param location: Required. The location of the entry to update.
     :type location: str
     :param entry_group: The entry group ID for the entry that is being updated.
@@ -1940,8 +1939,8 @@ class CloudDataCatalogUpdateTagOperator(BaseOperator):
         updated. Currently the only modifiable field is the field ``fields``.
 
         If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-    :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param location: Required. The location of the tag to rename.
     :type location: str
     :param entry_group: The entry group ID for the tag that is being updated.
@@ -2060,8 +2059,8 @@ class CloudDataCatalogUpdateTagTemplateOperator(BaseOperator):
         If absent or empty, all of the allowed fields above will be updated.
 
         If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-    :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param location: Required. The location of the tag template to rename.
     :type location: str
     :param tag_template_id: Optional. The tag template ID for the entry that is being updated.
@@ -2172,8 +2171,8 @@ class CloudDataCatalogUpdateTagTemplateFieldOperator(BaseOperator):
         Therefore, enum values can only be added, existing enum values cannot be deleted nor renamed.
 
         If a dict is provided, it must be of the same form as the protobuf message
-        :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask`
-    :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask]
+        :class:`~google.protobuf.field_mask_pb2.FieldMask`
+    :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask]
     :param tag_template_field_name: Optional. The name of the tag template field to rename.
     :type tag_template_field_name: str
     :param location: Optional. The location of the tag to rename.
diff --git a/setup.py b/setup.py
index 75f5db5..5314814 100644
--- a/setup.py
+++ b/setup.py
@@ -287,7 +287,7 @@ google = [
     'google-cloud-bigquery-datatransfer>=3.0.0,<4.0.0',
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
-    'google-cloud-datacatalog>=1.0.0,<2.0.0',
+    'google-cloud-datacatalog>=3.0.0,<4.0.0',
     'google-cloud-dataproc>=1.0.1,<2.0.0',
     'google-cloud-dlp>=0.11.0,<2.0.0',
     'google-cloud-kms>=2.0.0,<3.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/tests/providers/google/cloud/hooks/test_datacatalog.py
index f5192c5..99d785f 100644
--- a/tests/providers/google/cloud/hooks/test_datacatalog.py
+++ b/tests/providers/google/cloud/hooks/test_datacatalog.py
@@ -22,6 +22,7 @@ from unittest import TestCase, mock
 
 import pytest
 from google.api_core.retry import Retry
+from google.cloud.datacatalog_v1beta1 import CreateTagRequest, CreateTagTemplateRequest
 from google.cloud.datacatalog_v1beta1.types import Entry, Tag, TagTemplate
 
 from airflow import AirflowException
@@ -38,7 +39,7 @@ TEST_ENTRY_ID: str = "test-entry-id"
 TEST_ENTRY: Dict = {}
 TEST_RETRY: Retry = Retry()
 TEST_TIMEOUT: float = 4
-TEST_METADATA: Sequence[Tuple[str, str]] = []
+TEST_METADATA: Sequence[Tuple[str, str]] = ()
 TEST_ENTRY_GROUP_ID: str = "test-entry-group-id"
 TEST_ENTRY_GROUP: Dict = {}
 TEST_TAG: Dict = {}
@@ -102,7 +103,7 @@ class TestCloudDataCatalog(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.lookup_entry.assert_called_once_with(
-            linked_resource=TEST_LINKED_RESOURCE,
+            request=dict(linked_resource=TEST_LINKED_RESOURCE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -118,7 +119,10 @@ class TestCloudDataCatalog(TestCase):
             sql_resource=TEST_SQL_RESOURCE, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
         )
         mock_get_conn.return_value.lookup_entry.assert_called_once_with(
-            sql_resource=TEST_SQL_RESOURCE, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA
+            request=dict(sql_resource=TEST_SQL_RESOURCE),
+            retry=TEST_RETRY,
+            timeout=TEST_TIMEOUT,
+            metadata=TEST_METADATA,
         )
 
     @mock.patch(
@@ -148,10 +152,9 @@ class TestCloudDataCatalog(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.search_catalog.assert_called_once_with(
-            scope=TEST_SCOPE,
-            query=TEST_QUERY,
-            page_size=TEST_PAGE_SIZE,
-            order_by=TEST_ORDER_BY,
+            request=dict(
+                scope=TEST_SCOPE, query=TEST_QUERY, page_size=TEST_PAGE_SIZE, order_by=TEST_ORDER_BY
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -184,9 +187,11 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_entry.assert_called_once_with(
-            parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
-            entry_id=TEST_ENTRY_ID,
-            entry=TEST_ENTRY,
+            request=dict(
+                parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
+                entry_id=TEST_ENTRY_ID,
+                entry=TEST_ENTRY,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -207,9 +212,11 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_entry_group.assert_called_once_with(
-            parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1),
-            entry_group_id=TEST_ENTRY_GROUP_ID,
-            entry_group=TEST_ENTRY_GROUP,
+            request=dict(
+                parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1),
+                entry_group_id=TEST_ENTRY_GROUP_ID,
+                entry_group=TEST_ENTRY_GROUP,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -232,8 +239,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
-            tag={"template": TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)},
+            request=CreateTagRequest(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+                tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -256,8 +265,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
-            tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)),
+            request=CreateTagRequest(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+                tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -278,9 +289,11 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag_template.assert_called_once_with(
-            parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1),
-            tag_template_id=TEST_TAG_TEMPLATE_ID,
-            tag_template=TEST_TAG_TEMPLATE,
+            request=CreateTagTemplateRequest(
+                parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1),
+                tag_template_id=TEST_TAG_TEMPLATE_ID,
+                tag_template=TEST_TAG_TEMPLATE,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -302,9 +315,11 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag_template_field.assert_called_once_with(
-            parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1),
-            tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+            request=dict(
+                parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1),
+                tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -325,7 +340,9 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_entry.assert_called_once_with(
-            name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+            request=dict(
+                name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -345,7 +362,9 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_entry_group.assert_called_once_with(
-            name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
+            request=dict(
+                name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -367,7 +386,9 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag.assert_called_once_with(
-            name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1),
+            request=dict(
+                name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -388,8 +409,7 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag_template.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1),
-            force=TEST_FORCE,
+            request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), force=TEST_FORCE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -411,8 +431,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
-            force=TEST_FORCE,
+            request=dict(
+                name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
+                force=TEST_FORCE,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -433,7 +455,9 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_entry.assert_called_once_with(
-            name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+            request=dict(
+                name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -454,8 +478,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_entry_group.assert_called_once_with(
-            name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
-            read_mask=TEST_READ_MASK,
+            request=dict(
+                name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1),
+                read_mask=TEST_READ_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -475,7 +501,9 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_tag_template.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1),
+            request=dict(
+                name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -497,8 +525,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_tags.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
-            page_size=TEST_PAGE_SIZE,
+            request=dict(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+                page_size=TEST_PAGE_SIZE,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -524,8 +554,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_tags.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
-            page_size=100,
+            request=dict(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1),
+                page_size=100,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -548,8 +580,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.rename_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
-            new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
+            request=dict(
+                name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
+                new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -572,8 +606,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_entry.assert_called_once_with(
-            entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1)),
+                update_mask=TEST_UPDATE_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -597,8 +633,7 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag.assert_called_once_with(
-            tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1)), update_mask=TEST_UPDATE_MASK),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -620,8 +655,10 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag_template.assert_called_once_with(
-            tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)),
+                update_mask=TEST_UPDATE_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -644,9 +681,11 @@ class TestCloudDataCatalogWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1),
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+                update_mask=TEST_UPDATE_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -680,9 +719,11 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_entry.assert_called_once_with(
-            parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2),
-            entry_id=TEST_ENTRY_ID,
-            entry=TEST_ENTRY,
+            request=dict(
+                parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2),
+                entry_id=TEST_ENTRY_ID,
+                entry=TEST_ENTRY,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -704,9 +745,11 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_entry_group.assert_called_once_with(
-            parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2),
-            entry_group_id=TEST_ENTRY_GROUP_ID,
-            entry_group=TEST_ENTRY_GROUP,
+            request=dict(
+                parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2),
+                entry_group_id=TEST_ENTRY_GROUP_ID,
+                entry_group=TEST_ENTRY_GROUP,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -730,8 +773,10 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
-            tag={"template": TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)},
+            request=CreateTagRequest(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
+                tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -755,8 +800,10 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
-            tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
+            request=CreateTagRequest(
+                parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
+                tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -778,9 +825,11 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag_template.assert_called_once_with(
-            parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2),
-            tag_template_id=TEST_TAG_TEMPLATE_ID,
-            tag_template=TEST_TAG_TEMPLATE,
+            request=CreateTagTemplateRequest(
+                parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2),
+                tag_template_id=TEST_TAG_TEMPLATE_ID,
+                tag_template=TEST_TAG_TEMPLATE,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -803,9 +852,11 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.create_tag_template_field.assert_called_once_with(
-            parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2),
-            tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+            request=dict(
+                parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2),
+                tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID,
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -827,7 +878,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_entry.assert_called_once_with(
-            name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
+            request=dict(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -848,7 +899,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_entry_group.assert_called_once_with(
-            name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2),
+            request=dict(name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -871,7 +922,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag.assert_called_once_with(
-            name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2),
+            request=dict(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -893,8 +944,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag_template.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2),
-            force=TEST_FORCE,
+            request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), force=TEST_FORCE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -917,8 +967,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.delete_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2),
-            force=TEST_FORCE,
+            request=dict(name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), force=TEST_FORCE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -940,7 +989,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_entry.assert_called_once_with(
-            name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
+            request=dict(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -962,8 +1011,10 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_entry_group.assert_called_once_with(
-            name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2),
-            read_mask=TEST_READ_MASK,
+            request=dict(
+                name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2),
+                read_mask=TEST_READ_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -984,7 +1035,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.get_tag_template.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2),
+            request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1007,8 +1058,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_tags.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
-            page_size=TEST_PAGE_SIZE,
+            request=dict(parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), page_size=TEST_PAGE_SIZE),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1035,8 +1085,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.list_tags.assert_called_once_with(
-            parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2),
-            page_size=100,
+            request=dict(parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), page_size=100),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1060,8 +1109,10 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.rename_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2),
-            new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
+            request=dict(
+                name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2),
+                new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1085,8 +1136,9 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_entry.assert_called_once_with(
-            entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)), update_mask=TEST_UPDATE_MASK
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1111,8 +1163,7 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag.assert_called_once_with(
-            tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)), update_mask=TEST_UPDATE_MASK),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1135,8 +1186,10 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag_template.assert_called_once_with(
-            tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)),
+                update_mask=TEST_UPDATE_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -1160,9 +1213,11 @@ class TestCloudDataCatalogWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.update_tag_template_field.assert_called_once_with(
-            name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2),
-            tag_template_field=TEST_TAG_TEMPLATE_FIELD,
-            update_mask=TEST_UPDATE_MASK,
+            request=dict(
+                name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2),
+                tag_template_field=TEST_TAG_TEMPLATE_FIELD,
+                update_mask=TEST_UPDATE_MASK,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/tests/providers/google/cloud/operators/test_datacatalog.py
index b575dd4..517b35c 100644
--- a/tests/providers/google/cloud/operators/test_datacatalog.py
+++ b/tests/providers/google/cloud/operators/test_datacatalog.py
@@ -87,15 +87,25 @@ TEST_TAG_PATH: str = (
 )
 
 TEST_ENTRY: Entry = Entry(name=TEST_ENTRY_PATH)
-TEST_ENTRY_DICT: Dict = dict(name=TEST_ENTRY_PATH)
+TEST_ENTRY_DICT: Dict = {
+    'description': '',
+    'display_name': '',
+    'linked_resource': '',
+    'name': TEST_ENTRY_PATH,
+}
 TEST_ENTRY_GROUP: EntryGroup = EntryGroup(name=TEST_ENTRY_GROUP_PATH)
-TEST_ENTRY_GROUP_DICT: Dict = dict(name=TEST_ENTRY_GROUP_PATH)
-TEST_TAG: EntryGroup = Tag(name=TEST_TAG_PATH)
-TEST_TAG_DICT: Dict = dict(name=TEST_TAG_PATH)
+TEST_ENTRY_GROUP_DICT: Dict = {'description': '', 'display_name': '', 'name': TEST_ENTRY_GROUP_PATH}
+TEST_TAG: Tag = Tag(name=TEST_TAG_PATH)
+TEST_TAG_DICT: Dict = {'fields': {}, 'name': TEST_TAG_PATH, 'template': '', 'template_display_name': ''}
 TEST_TAG_TEMPLATE: TagTemplate = TagTemplate(name=TEST_TAG_TEMPLATE_PATH)
-TEST_TAG_TEMPLATE_DICT: Dict = dict(name=TEST_TAG_TEMPLATE_PATH)
-TEST_TAG_TEMPLATE_FIELD: Dict = TagTemplateField(name=TEST_TAG_TEMPLATE_FIELD_ID)
-TEST_TAG_TEMPLATE_FIELD_DICT: Dict = dict(name=TEST_TAG_TEMPLATE_FIELD_ID)
+TEST_TAG_TEMPLATE_DICT: Dict = {'display_name': '', 'fields': {}, 'name': TEST_TAG_TEMPLATE_PATH}
+TEST_TAG_TEMPLATE_FIELD: TagTemplateField = TagTemplateField(name=TEST_TAG_TEMPLATE_FIELD_ID)
+TEST_TAG_TEMPLATE_FIELD_DICT: Dict = {
+    'display_name': '',
+    'is_required': False,
+    'name': TEST_TAG_TEMPLATE_FIELD_ID,
+    'order': 0,
+}
 
 
 class TestCloudDataCatalogCreateEntryOperator(TestCase):
@@ -498,7 +508,10 @@ class TestCloudDataCatalogDeleteTagTemplateFieldOperator(TestCase):
 
 
 class TestCloudDataCatalogGetEntryOperator(TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook",
+        **{"return_value.get_entry.return_value": TEST_ENTRY},  # type: ignore
+    )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
         task = CloudDataCatalogGetEntryOperator(
             task_id="task_id",
@@ -529,7 +542,10 @@ class TestCloudDataCatalogGetEntryOperator(TestCase):
 
 
 class TestCloudDataCatalogGetEntryGroupOperator(TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook",
+        **{"return_value.get_entry_group.return_value": TEST_ENTRY_GROUP},  # type: ignore
+    )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
         task = CloudDataCatalogGetEntryGroupOperator(
             task_id="task_id",
@@ -560,7 +576,10 @@ class TestCloudDataCatalogGetEntryGroupOperator(TestCase):
 
 
 class TestCloudDataCatalogGetTagTemplateOperator(TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook",
+        **{"return_value.get_tag_template.return_value": TEST_TAG_TEMPLATE},  # type: ignore
+    )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
         task = CloudDataCatalogGetTagTemplateOperator(
             task_id="task_id",
@@ -589,7 +608,10 @@ class TestCloudDataCatalogGetTagTemplateOperator(TestCase):
 
 
 class TestCloudDataCatalogListTagsOperator(TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook",
+        **{"return_value.list_tags.return_value": [TEST_TAG]},  # type: ignore
+    )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
         task = CloudDataCatalogListTagsOperator(
             task_id="task_id",
@@ -622,7 +644,10 @@ class TestCloudDataCatalogListTagsOperator(TestCase):
 
 
 class TestCloudDataCatalogLookupEntryOperator(TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook",
+        **{"return_value.lookup_entry.return_value": TEST_ENTRY},  # type: ignore
+    )
     def test_assert_valid_hook_call(self, mock_hook) -> None:
         task = CloudDataCatalogLookupEntryOperator(
             task_id="task_id",


[airflow] 33/41: Remove reinstalling azure-storage steps from CI / Breeze (#14102)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b31484f11f60c53a5b3e6fa3c9420609d268724b
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Mon Feb 8 21:56:46 2021 +0000

    Remove reinstalling azure-storage steps from CI / Breeze (#14102)
    
    Since https://github.com/apache/airflow/pull/12188 was merged, I
    don't think we need these steps.
    
    This step also caused the docker build step for 2.0.1rc2 to fail
    
    Co-authored-by: Jarek Potiuk <ja...@potiuk.com>
    (cherry picked from commit 3ffd21745d25e6239254fe3f5688b34f5f6f77e8)
---
 scripts/docker/install_airflow.sh                  |  7 +------
 scripts/in_container/_in_container_utils.sh        | 22 ++++------------------
 scripts/in_container/entrypoint_ci.sh              |  4 ++--
 scripts/in_container/run_ci_tests.sh               |  2 --
 .../run_install_and_test_provider_packages.sh      |  5 ++---
 .../run_prepare_provider_documentation.sh          |  1 -
 setup.py                                           | 18 +++++-------------
 7 files changed, 14 insertions(+), 45 deletions(-)

diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh
index bfe88be..5f1e9d9 100755
--- a/scripts/docker/install_airflow.sh
+++ b/scripts/docker/install_airflow.sh
@@ -66,9 +66,7 @@ function install_airflow() {
             pip install ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
                 "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
         fi
-        # Work around to install azure-storage-blob
-        pip uninstall azure-storage azure-storage-blob azure-storage-file --yes
-        pip install azure-storage-blob azure-storage-file
+
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
         pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
@@ -85,9 +83,6 @@ function install_airflow() {
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade --upgrade-strategy only-if-needed \
             ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
             "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
-        # Work around to install azure-storage-blob
-        pip uninstall azure-storage azure-storage-blob azure-storage-file --yes
-        pip install azure-storage-blob azure-storage-file
         # make sure correct PIP version is used
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}"
         pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 0a9db95..1e9a192 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -275,7 +275,7 @@ function install_airflow_from_wheel() {
         >&2 echo
         exit 4
     fi
-    pip install "${airflow_package}${1}"
+    pip install "${airflow_package}${extras}"
 }
 
 function install_airflow_from_sdist() {
@@ -292,20 +292,7 @@ function install_airflow_from_sdist() {
         >&2 echo
         exit 4
     fi
-    pip install "${airflow_package}${1}"
-}
-
-function reinstall_azure_storage_blob() {
-    group_start "Reinstalls azure-storage-blob (temporary workaround)"
-    # Reinstall azure-storage-blob here until https://github.com/apache/airflow/pull/12188 is solved
-    # Azure-storage-blob need to be reinstalled to overwrite azure-storage-blob installed by old version
-    # of the `azure-storage` library
-    echo
-    echo "Reinstalling azure-storage-blob"
-    echo
-    pip uninstall azure-storage azure-storage-blob azure-storage-file --yes
-    pip install azure-storage-blob azure-storage-file --no-deps --force-reinstall
-    group_end
+    pip install "${airflow_package}${extras}"
 }
 
 function install_remaining_dependencies() {
@@ -338,13 +325,12 @@ function uninstall_airflow_and_providers() {
 
 function install_released_airflow_version() {
     local version="${1}"
-    local extras="${2}"
     echo
-    echo "Installing released ${version} version of airflow with extras ${extras}"
+    echo "Installing released ${version} version of airflow without extras"
     echo
 
     rm -rf "${AIRFLOW_SOURCES}"/*.egg-info
-    pip install --upgrade "apache-airflow${extras}==${version}"
+    pip install --upgrade "apache-airflow==${version}"
 }
 
 function install_local_airflow_with_eager_upgrade() {
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 3761a3b..b99cdc1 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -98,9 +98,9 @@ elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist"  ]]; then
     uninstall_providers
 else
     echo
-    echo "Install airflow from PyPI including [${AIRFLOW_EXTRAS}] extras"
+    echo "Install airflow from PyPI without extras"
     echo
-    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[${AIRFLOW_EXTRAS}]"
+    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}"
 fi
 if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then
     echo
diff --git a/scripts/in_container/run_ci_tests.sh b/scripts/in_container/run_ci_tests.sh
index 43be453..ca3c41d 100755
--- a/scripts/in_container/run_ci_tests.sh
+++ b/scripts/in_container/run_ci_tests.sh
@@ -18,8 +18,6 @@
 # shellcheck source=scripts/in_container/_in_container_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
 
-reinstall_azure_storage_blob
-
 echo
 echo "Starting the tests with those pytest arguments:" "${@}"
 echo
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index 9b951c7..76d41e4 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -67,9 +67,9 @@ function install_airflow_as_specified() {
         uninstall_providers
     else
         echo
-        echo "Install airflow from PyPI including [${AIRFLOW_EXTRAS}] extras"
+        echo "Install airflow from PyPI without extras"
         echo
-        install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[${AIRFLOW_EXTRAS}]"
+        install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}"
         uninstall_providers
     fi
     group_end
@@ -197,7 +197,6 @@ setup_provider_packages
 verify_parameters
 install_airflow_as_specified
 install_remaining_dependencies
-reinstall_azure_storage_blob
 install_provider_packages
 import_all_provider_classes
 
diff --git a/scripts/in_container/run_prepare_provider_documentation.sh b/scripts/in_container/run_prepare_provider_documentation.sh
index e88cdfc..1a0bfa8 100755
--- a/scripts/in_container/run_prepare_provider_documentation.sh
+++ b/scripts/in_container/run_prepare_provider_documentation.sh
@@ -100,7 +100,6 @@ install_supported_pip_version
 # install extra packages missing in devel_ci
 # TODO: remove it when devel_all == devel_ci
 install_remaining_dependencies
-reinstall_azure_storage_blob
 
 if [[ ${BACKPORT_PACKAGES} != "true" ]]; then
     import_all_provider_classes
diff --git a/setup.py b/setup.py
index cd38ef2..a752d82 100644
--- a/setup.py
+++ b/setup.py
@@ -219,6 +219,8 @@ azure = [
     'azure-mgmt-containerinstance>=1.5.0,<2.0',
     'azure-mgmt-datalake-store>=0.5.0',
     'azure-mgmt-resource>=2.2.0',
+    'azure-storage-blob>=12.7.0',
+    'azure-storage-common>=2.1.0',
     'azure-storage-file>=2.1.0',
 ]
 cassandra = [
@@ -423,19 +425,9 @@ slack = [
     'slack_sdk>=3.0.0,<4.0.0',
 ]
 snowflake = [
-    # The `azure` provider uses legacy `azure-storage` library, where `snowflake` uses the
-    # newer and more stable versions of those libraries. Most of `azure` operators and hooks work
-    # fine together with `snowflake` because the deprecated library does not overlap with the
-    # new libraries except the `blob` classes. So while `azure` works fine for most cases
-    # blob is the only exception
-    # Solution to that is being worked on in https://github.com/apache/airflow/pull/12188
-    # once it is merged, we can move those two back to `azure` extra.
-    'azure-core>=1.10.0',
-    'azure-storage-blob',
-    'azure-storage-common',
-    # Snowflake conector > 2.3.8 is needed because it has vendored urrllib3 and requests libraries which
-    # are monkey-patched. In earlier versions of the library, monkeypatching the libraries by snowflake
-    # caused other providers to fail (Google, Amazon etc.)
+    # Snowflake connector > 2.3.8 is needed because it has vendored-in, patched urllib3 and requests libraries
+    # In earlier versions of the snowflake library, monkey-patching the libraries caused other
+    # providers to fail (Google, Amazon etc.)
     'snowflake-connector-python>=2.3.8',
     'snowflake-sqlalchemy>=1.1.0',
 ]


[airflow] 06/41: Use DAG context manager in examples (#13297)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7a6d568bc40af18c3777698b077e2b4c55f7859b
Author: Jennifer Melot <jt...@gmail.com>
AuthorDate: Wed Jan 20 07:16:12 2021 -0500

    Use DAG context manager in examples (#13297)
    
    (cherry picked from commit 9923d606d2887c52390a30639fc1ee0d4000149c)
---
 airflow/example_dags/example_bash_operator.py      |  54 ++++-----
 airflow/example_dags/example_branch_operator.py    |  51 ++++-----
 .../example_branch_python_dop_operator_3.py        |  31 +++--
 airflow/example_dags/example_latest_only.py        |  10 +-
 .../example_latest_only_with_trigger.py            |  18 +--
 .../example_passing_params_via_test_command.py     |  66 +++++------
 airflow/example_dags/example_python_operator.py    | 127 ++++++++++-----------
 .../example_dags/example_short_circuit_operator.py |  30 +++--
 airflow/example_dags/example_skip_dag.py           |   6 +-
 airflow/example_dags/example_subdag_operator.py    |  45 ++++----
 .../example_dags/example_trigger_controller_dag.py |  15 ++-
 airflow/example_dags/example_trigger_target_dag.py |  29 +++--
 airflow/example_dags/example_xcom.py               |  45 ++++----
 airflow/example_dags/test_utils.py                 |  15 ++-
 airflow/example_dags/tutorial.py                   |  92 +++++++--------
 airflow/example_dags/tutorial_etl_dag.py           |  41 ++++---
 docs/apache-airflow/executor/kubernetes.rst        |   2 +
 docs/apache-airflow/howto/operator/bash.rst        |   2 +
 .../howto/operator/external_task_sensor.rst        |   2 +
 docs/apache-airflow/howto/operator/python.rst      |   3 +
 docs/apache-airflow/tutorial.rst                   |   3 +
 docs/apache-airflow/tutorial_taskflow_api.rst      |   6 +
 22 files changed, 344 insertions(+), 349 deletions(-)
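
The diffs that follow are mechanical: each example DAG is rebuilt inside a "with DAG(...) as dag:"
block, so operators created in the block are attached to the DAG automatically and the repeated
dag=dag argument can be dropped. A minimal sketch of the two equivalent forms, with placeholder
dag_ids and task_ids that are not taken from the examples:

    from airflow import DAG
    from airflow.operators.dummy import DummyOperator
    from airflow.utils.dates import days_ago

    # Old style: every operator needs an explicit dag=dag argument.
    dag1 = DAG(dag_id='explicit_dag_argument', start_date=days_ago(2), schedule_interval=None)
    task_a = DummyOperator(task_id='task_a', dag=dag1)

    # New style: the DAG context manager attaches tasks created inside the block.
    with DAG(dag_id='dag_context_manager', start_date=days_ago(2), schedule_interval=None) as dag2:
        task_b = DummyOperator(task_id='task_b')

Both forms define the same kind of DAG; the context-manager form only removes boilerplate, which is
all the changes below do across the example DAGs and the documentation snippets.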

diff --git a/airflow/example_dags/example_bash_operator.py b/airflow/example_dags/example_bash_operator.py
index 1c22fff..0665971 100644
--- a/airflow/example_dags/example_bash_operator.py
+++ b/airflow/example_dags/example_bash_operator.py
@@ -29,7 +29,7 @@ args = {
     'owner': 'airflow',
 }
 
-dag = DAG(
+with DAG(
     dag_id='example_bash_operator',
     default_args=args,
     schedule_interval='0 0 * * *',
@@ -37,39 +37,35 @@ dag = DAG(
     dagrun_timeout=timedelta(minutes=60),
     tags=['example', 'example2'],
     params={"example_key": "example_value"},
-)
+) as dag:
 
-run_this_last = DummyOperator(
-    task_id='run_this_last',
-    dag=dag,
-)
+    run_this_last = DummyOperator(
+        task_id='run_this_last',
+    )
+
+    # [START howto_operator_bash]
+    run_this = BashOperator(
+        task_id='run_after_loop',
+        bash_command='echo 1',
+    )
+    # [END howto_operator_bash]
 
-# [START howto_operator_bash]
-run_this = BashOperator(
-    task_id='run_after_loop',
-    bash_command='echo 1',
-    dag=dag,
-)
-# [END howto_operator_bash]
+    run_this >> run_this_last
 
-run_this >> run_this_last
+    for i in range(3):
+        task = BashOperator(
+            task_id='runme_' + str(i),
+            bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
+        )
+        task >> run_this
 
-for i in range(3):
-    task = BashOperator(
-        task_id='runme_' + str(i),
-        bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
-        dag=dag,
+    # [START howto_operator_bash_template]
+    also_run_this = BashOperator(
+        task_id='also_run_this',
+        bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"',
     )
-    task >> run_this
-
-# [START howto_operator_bash_template]
-also_run_this = BashOperator(
-    task_id='also_run_this',
-    bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"',
-    dag=dag,
-)
-# [END howto_operator_bash_template]
-also_run_this >> run_this_last
+    # [END howto_operator_bash_template]
+    also_run_this >> run_this_last
 
 if __name__ == "__main__":
     dag.cli()
diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py
index 50eb328..7c5e166 100644
--- a/airflow/example_dags/example_branch_operator.py
+++ b/airflow/example_dags/example_branch_operator.py
@@ -29,43 +29,38 @@ args = {
     'owner': 'airflow',
 }
 
-dag = DAG(
+with DAG(
     dag_id='example_branch_operator',
     default_args=args,
     start_date=days_ago(2),
     schedule_interval="@daily",
     tags=['example', 'example2'],
-)
+) as dag:
 
-run_this_first = DummyOperator(
-    task_id='run_this_first',
-    dag=dag,
-)
-
-options = ['branch_a', 'branch_b', 'branch_c', 'branch_d']
-
-branching = BranchPythonOperator(
-    task_id='branching',
-    python_callable=lambda: random.choice(options),
-    dag=dag,
-)
-run_this_first >> branching
+    run_this_first = DummyOperator(
+        task_id='run_this_first',
+    )
 
-join = DummyOperator(
-    task_id='join',
-    trigger_rule='none_failed_or_skipped',
-    dag=dag,
-)
+    options = ['branch_a', 'branch_b', 'branch_c', 'branch_d']
 
-for option in options:
-    t = DummyOperator(
-        task_id=option,
-        dag=dag,
+    branching = BranchPythonOperator(
+        task_id='branching',
+        python_callable=lambda: random.choice(options),
     )
+    run_this_first >> branching
 
-    dummy_follow = DummyOperator(
-        task_id='follow_' + option,
-        dag=dag,
+    join = DummyOperator(
+        task_id='join',
+        trigger_rule='none_failed_or_skipped',
     )
 
-    branching >> t >> dummy_follow >> join
+    for option in options:
+        t = DummyOperator(
+            task_id=option,
+        )
+
+        dummy_follow = DummyOperator(
+            task_id='follow_' + option,
+        )
+
+        branching >> t >> dummy_follow >> join
diff --git a/airflow/example_dags/example_branch_python_dop_operator_3.py b/airflow/example_dags/example_branch_python_dop_operator_3.py
index f01fc50..badad5a 100644
--- a/airflow/example_dags/example_branch_python_dop_operator_3.py
+++ b/airflow/example_dags/example_branch_python_dop_operator_3.py
@@ -31,14 +31,6 @@ args = {
     'depends_on_past': True,
 }
 
-dag = DAG(
-    dag_id='example_branch_dop_operator_v3',
-    schedule_interval='*/1 * * * *',
-    start_date=days_ago(2),
-    default_args=args,
-    tags=['example'],
-)
-
 
 def should_run(**kwargs):
     """
@@ -59,12 +51,19 @@ def should_run(**kwargs):
         return "dummy_task_2"
 
 
-cond = BranchPythonOperator(
-    task_id='condition',
-    python_callable=should_run,
-    dag=dag,
-)
+with DAG(
+    dag_id='example_branch_dop_operator_v3',
+    schedule_interval='*/1 * * * *',
+    start_date=days_ago(2),
+    default_args=args,
+    tags=['example'],
+) as dag:
+
+    cond = BranchPythonOperator(
+        task_id='condition',
+        python_callable=should_run,
+    )
 
-dummy_task_1 = DummyOperator(task_id='dummy_task_1', dag=dag)
-dummy_task_2 = DummyOperator(task_id='dummy_task_2', dag=dag)
-cond >> [dummy_task_1, dummy_task_2]
+    dummy_task_1 = DummyOperator(task_id='dummy_task_1')
+    dummy_task_2 = DummyOperator(task_id='dummy_task_2')
+    cond >> [dummy_task_1, dummy_task_2]
diff --git a/airflow/example_dags/example_latest_only.py b/airflow/example_dags/example_latest_only.py
index 272a054..d0d5db0 100644
--- a/airflow/example_dags/example_latest_only.py
+++ b/airflow/example_dags/example_latest_only.py
@@ -25,14 +25,14 @@ from airflow.operators.dummy import DummyOperator
 from airflow.operators.latest_only import LatestOnlyOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(
+with DAG(
     dag_id='latest_only',
     schedule_interval=dt.timedelta(hours=4),
     start_date=days_ago(2),
     tags=['example2', 'example3'],
-)
+) as dag:
 
-latest_only = LatestOnlyOperator(task_id='latest_only', dag=dag)
-task1 = DummyOperator(task_id='task1', dag=dag)
+    latest_only = LatestOnlyOperator(task_id='latest_only')
+    task1 = DummyOperator(task_id='task1')
 
-latest_only >> task1
+    latest_only >> task1
diff --git a/airflow/example_dags/example_latest_only_with_trigger.py b/airflow/example_dags/example_latest_only_with_trigger.py
index 9178278..a8e96e7 100644
--- a/airflow/example_dags/example_latest_only_with_trigger.py
+++ b/airflow/example_dags/example_latest_only_with_trigger.py
@@ -28,19 +28,19 @@ from airflow.operators.latest_only import LatestOnlyOperator
 from airflow.utils.dates import days_ago
 from airflow.utils.trigger_rule import TriggerRule
 
-dag = DAG(
+with DAG(
     dag_id='latest_only_with_trigger',
     schedule_interval=dt.timedelta(hours=4),
     start_date=days_ago(2),
     tags=['example3'],
-)
+) as dag:
 
-latest_only = LatestOnlyOperator(task_id='latest_only', dag=dag)
-task1 = DummyOperator(task_id='task1', dag=dag)
-task2 = DummyOperator(task_id='task2', dag=dag)
-task3 = DummyOperator(task_id='task3', dag=dag)
-task4 = DummyOperator(task_id='task4', dag=dag, trigger_rule=TriggerRule.ALL_DONE)
+    latest_only = LatestOnlyOperator(task_id='latest_only')
+    task1 = DummyOperator(task_id='task1')
+    task2 = DummyOperator(task_id='task2')
+    task3 = DummyOperator(task_id='task3')
+    task4 = DummyOperator(task_id='task4', trigger_rule=TriggerRule.ALL_DONE)
 
-latest_only >> task1 >> [task3, task4]
-task2 >> [task3, task4]
+    latest_only >> task1 >> [task3, task4]
+    task2 >> [task3, task4]
 # [END example]
diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py
index 456def2..d2ecdfa 100644
--- a/airflow/example_dags/example_passing_params_via_test_command.py
+++ b/airflow/example_dags/example_passing_params_via_test_command.py
@@ -20,23 +20,13 @@
 
 import os
 from datetime import timedelta
+from textwrap import dedent
 
 from airflow import DAG
 from airflow.operators.bash import BashOperator
 from airflow.operators.python import PythonOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(
-    "example_passing_params_via_test_command",
-    default_args={
-        "owner": "airflow",
-    },
-    schedule_interval='*/1 * * * *',
-    start_date=days_ago(1),
-    dagrun_timeout=timedelta(minutes=4),
-    tags=['example'],
-)
-
 
 def my_py_command(test_mode, params):
     """
@@ -56,26 +46,6 @@ def my_py_command(test_mode, params):
     return 1
 
 
-my_templated_command = """
-    echo " 'foo was passed in via Airflow CLI Test command with value {{ params.foo }} "
-    echo " 'miff was passed in via BashOperator with value {{ params.miff }} "
-"""
-
-run_this = PythonOperator(
-    task_id='run_this',
-    python_callable=my_py_command,
-    params={"miff": "agg"},
-    dag=dag,
-)
-
-also_run_this = BashOperator(
-    task_id='also_run_this',
-    bash_command=my_templated_command,
-    params={"miff": "agg"},
-    dag=dag,
-)
-
-
 def print_env_vars(test_mode):
     """
     Print out the "foo" param passed in via
@@ -87,6 +57,36 @@ def print_env_vars(test_mode):
         print(f"AIRFLOW_TEST_MODE={os.environ.get('AIRFLOW_TEST_MODE')}")
 
 
-env_var_test_task = PythonOperator(task_id='env_var_test_task', python_callable=print_env_vars, dag=dag)
+with DAG(
+    "example_passing_params_via_test_command",
+    default_args={
+        "owner": "airflow",
+    },
+    schedule_interval='*/1 * * * *',
+    start_date=days_ago(1),
+    dagrun_timeout=timedelta(minutes=4),
+    tags=['example'],
+) as dag:
+
+    my_templated_command = dedent(
+        """
+        echo " 'foo was passed in via Airflow CLI Test command with value {{ params.foo }} "
+        echo " 'miff was passed in via BashOperator with value {{ params.miff }} "
+    """
+    )
+
+    run_this = PythonOperator(
+        task_id='run_this',
+        python_callable=my_py_command,
+        params={"miff": "agg"},
+    )
+
+    also_run_this = BashOperator(
+        task_id='also_run_this',
+        bash_command=my_templated_command,
+        params={"miff": "agg"},
+    )
+
+    env_var_test_task = PythonOperator(task_id='env_var_test_task', python_callable=print_env_vars)
 
-run_this >> also_run_this
+    run_this >> also_run_this
diff --git a/airflow/example_dags/example_python_operator.py b/airflow/example_dags/example_python_operator.py
index d5e16a5..a9db342 100644
--- a/airflow/example_dags/example_python_operator.py
+++ b/airflow/example_dags/example_python_operator.py
@@ -28,77 +28,68 @@ args = {
     'owner': 'airflow',
 }
 
-dag = DAG(
+with DAG(
     dag_id='example_python_operator',
     default_args=args,
     schedule_interval=None,
     start_date=days_ago(2),
     tags=['example'],
-)
-
-
-# [START howto_operator_python]
-def print_context(ds, **kwargs):
-    """Print the Airflow context and ds variable from the context."""
-    pprint(kwargs)
-    print(ds)
-    return 'Whatever you return gets printed in the logs'
-
-
-run_this = PythonOperator(
-    task_id='print_the_context',
-    python_callable=print_context,
-    dag=dag,
-)
-# [END howto_operator_python]
-
-
-# [START howto_operator_python_kwargs]
-def my_sleeping_function(random_base):
-    """This is a function that will run within the DAG execution"""
-    time.sleep(random_base)
-
-
-# Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
-for i in range(5):
-    task = PythonOperator(
-        task_id='sleep_for_' + str(i),
-        python_callable=my_sleeping_function,
-        op_kwargs={'random_base': float(i) / 10},
-        dag=dag,
+) as dag:
+
+    # [START howto_operator_python]
+    def print_context(ds, **kwargs):
+        """Print the Airflow context and ds variable from the context."""
+        pprint(kwargs)
+        print(ds)
+        return 'Whatever you return gets printed in the logs'
+
+    run_this = PythonOperator(
+        task_id='print_the_context',
+        python_callable=print_context,
     )
-
-    run_this >> task
-# [END howto_operator_python_kwargs]
-
-
-# [START howto_operator_python_venv]
-def callable_virtualenv():
-    """
-    Example function that will be performed in a virtual environment.
-
-    Importing at the module level ensures that it will not attempt to import the
-    library before it is installed.
-    """
-    from time import sleep
-
-    from colorama import Back, Fore, Style
-
-    print(Fore.RED + 'some red text')
-    print(Back.GREEN + 'and with a green background')
-    print(Style.DIM + 'and in dim text')
-    print(Style.RESET_ALL)
-    for _ in range(10):
-        print(Style.DIM + 'Please wait...', flush=True)
-        sleep(10)
-    print('Finished')
-
-
-virtualenv_task = PythonVirtualenvOperator(
-    task_id="virtualenv_python",
-    python_callable=callable_virtualenv,
-    requirements=["colorama==0.4.0"],
-    system_site_packages=False,
-    dag=dag,
-)
-# [END howto_operator_python_venv]
+    # [END howto_operator_python]
+
+    # [START howto_operator_python_kwargs]
+    def my_sleeping_function(random_base):
+        """This is a function that will run within the DAG execution"""
+        time.sleep(random_base)
+
+    # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
+    for i in range(5):
+        task = PythonOperator(
+            task_id='sleep_for_' + str(i),
+            python_callable=my_sleeping_function,
+            op_kwargs={'random_base': float(i) / 10},
+        )
+
+        run_this >> task
+    # [END howto_operator_python_kwargs]
+
+    # [START howto_operator_python_venv]
+    def callable_virtualenv():
+        """
+        Example function that will be performed in a virtual environment.
+
+        Importing at the module level ensures that it will not attempt to import the
+        library before it is installed.
+        """
+        from time import sleep
+
+        from colorama import Back, Fore, Style
+
+        print(Fore.RED + 'some red text')
+        print(Back.GREEN + 'and with a green background')
+        print(Style.DIM + 'and in dim text')
+        print(Style.RESET_ALL)
+        for _ in range(10):
+            print(Style.DIM + 'Please wait...', flush=True)
+            sleep(10)
+        print('Finished')
+
+    virtualenv_task = PythonVirtualenvOperator(
+        task_id="virtualenv_python",
+        python_callable=callable_virtualenv,
+        requirements=["colorama==0.4.0"],
+        system_site_packages=False,
+    )
+    # [END howto_operator_python_venv]
diff --git a/airflow/example_dags/example_short_circuit_operator.py b/airflow/example_dags/example_short_circuit_operator.py
index 38163a0..3836ef9 100644
--- a/airflow/example_dags/example_short_circuit_operator.py
+++ b/airflow/example_dags/example_short_circuit_operator.py
@@ -27,27 +27,25 @@ args = {
     'owner': 'airflow',
 }
 
-dag = DAG(
+with DAG(
     dag_id='example_short_circuit_operator',
     default_args=args,
     start_date=dates.days_ago(2),
     tags=['example'],
-)
+) as dag:
 
-cond_true = ShortCircuitOperator(
-    task_id='condition_is_True',
-    python_callable=lambda: True,
-    dag=dag,
-)
+    cond_true = ShortCircuitOperator(
+        task_id='condition_is_True',
+        python_callable=lambda: True,
+    )
 
-cond_false = ShortCircuitOperator(
-    task_id='condition_is_False',
-    python_callable=lambda: False,
-    dag=dag,
-)
+    cond_false = ShortCircuitOperator(
+        task_id='condition_is_False',
+        python_callable=lambda: False,
+    )
 
-ds_true = [DummyOperator(task_id='true_' + str(i), dag=dag) for i in [1, 2]]
-ds_false = [DummyOperator(task_id='false_' + str(i), dag=dag) for i in [1, 2]]
+    ds_true = [DummyOperator(task_id='true_' + str(i)) for i in [1, 2]]
+    ds_false = [DummyOperator(task_id='false_' + str(i)) for i in [1, 2]]
 
-chain(cond_true, *ds_true)
-chain(cond_false, *ds_false)
+    chain(cond_true, *ds_true)
+    chain(cond_false, *ds_false)
diff --git a/airflow/example_dags/example_skip_dag.py b/airflow/example_dags/example_skip_dag.py
index 633dc5e..77fbf4a 100644
--- a/airflow/example_dags/example_skip_dag.py
+++ b/airflow/example_dags/example_skip_dag.py
@@ -56,6 +56,6 @@ def create_test_pipeline(suffix, trigger_rule, dag_):
     join >> final
 
 
-dag = DAG(dag_id='example_skip_dag', default_args=args, start_date=days_ago(2), tags=['example'])
-create_test_pipeline('1', 'all_success', dag)
-create_test_pipeline('2', 'one_success', dag)
+with DAG(dag_id='example_skip_dag', default_args=args, start_date=days_ago(2), tags=['example']) as dag:
+    create_test_pipeline('1', 'all_success', dag)
+    create_test_pipeline('2', 'one_success', dag)
diff --git a/airflow/example_dags/example_subdag_operator.py b/airflow/example_dags/example_subdag_operator.py
index be88281..f27aec7 100644
--- a/airflow/example_dags/example_subdag_operator.py
+++ b/airflow/example_dags/example_subdag_operator.py
@@ -31,36 +31,31 @@ args = {
     'owner': 'airflow',
 }
 
-dag = DAG(
+with DAG(
     dag_id=DAG_NAME, default_args=args, start_date=days_ago(2), schedule_interval="@once", tags=['example']
-)
+) as dag:
 
-start = DummyOperator(
-    task_id='start',
-    dag=dag,
-)
+    start = DummyOperator(
+        task_id='start',
+    )
 
-section_1 = SubDagOperator(
-    task_id='section-1',
-    subdag=subdag(DAG_NAME, 'section-1', args),
-    dag=dag,
-)
+    section_1 = SubDagOperator(
+        task_id='section-1',
+        subdag=subdag(DAG_NAME, 'section-1', args),
+    )
 
-some_other_task = DummyOperator(
-    task_id='some-other-task',
-    dag=dag,
-)
+    some_other_task = DummyOperator(
+        task_id='some-other-task',
+    )
 
-section_2 = SubDagOperator(
-    task_id='section-2',
-    subdag=subdag(DAG_NAME, 'section-2', args),
-    dag=dag,
-)
+    section_2 = SubDagOperator(
+        task_id='section-2',
+        subdag=subdag(DAG_NAME, 'section-2', args),
+    )
 
-end = DummyOperator(
-    task_id='end',
-    dag=dag,
-)
+    end = DummyOperator(
+        task_id='end',
+    )
 
-start >> section_1 >> some_other_task >> section_2 >> end
+    start >> section_1 >> some_other_task >> section_2 >> end
 # [END example_subdag_operator]
diff --git a/airflow/example_dags/example_trigger_controller_dag.py b/airflow/example_dags/example_trigger_controller_dag.py
index 0f706c7..9d02399 100644
--- a/airflow/example_dags/example_trigger_controller_dag.py
+++ b/airflow/example_dags/example_trigger_controller_dag.py
@@ -25,17 +25,16 @@ from airflow import DAG
 from airflow.operators.trigger_dagrun import TriggerDagRunOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(
+with DAG(
     dag_id="example_trigger_controller_dag",
     default_args={"owner": "airflow"},
     start_date=days_ago(2),
     schedule_interval="@once",
     tags=['example'],
-)
+) as dag:
 
-trigger = TriggerDagRunOperator(
-    task_id="test_trigger_dagrun",
-    trigger_dag_id="example_trigger_target_dag",  # Ensure this equals the dag_id of the DAG to trigger
-    conf={"message": "Hello World"},
-    dag=dag,
-)
+    trigger = TriggerDagRunOperator(
+        task_id="test_trigger_dagrun",
+        trigger_dag_id="example_trigger_target_dag",  # Ensure this equals the dag_id of the DAG to trigger
+        conf={"message": "Hello World"},
+    )
diff --git a/airflow/example_dags/example_trigger_target_dag.py b/airflow/example_dags/example_trigger_target_dag.py
index f431dc4..2b4661b 100644
--- a/airflow/example_dags/example_trigger_target_dag.py
+++ b/airflow/example_dags/example_trigger_target_dag.py
@@ -27,14 +27,6 @@ from airflow.operators.bash import BashOperator
 from airflow.operators.python import PythonOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(
-    dag_id="example_trigger_target_dag",
-    default_args={"owner": "airflow"},
-    start_date=days_ago(2),
-    schedule_interval=None,
-    tags=['example'],
-)
-
 
 def run_this_func(**context):
     """
@@ -46,11 +38,18 @@ def run_this_func(**context):
     print(f"Remotely received value of {context['dag_run'].conf['message']} for key=message")
 
 
-run_this = PythonOperator(task_id="run_this", python_callable=run_this_func, dag=dag)
+with DAG(
+    dag_id="example_trigger_target_dag",
+    default_args={"owner": "airflow"},
+    start_date=days_ago(2),
+    schedule_interval=None,
+    tags=['example'],
+) as dag:
+
+    run_this = PythonOperator(task_id="run_this", python_callable=run_this_func)
 
-bash_task = BashOperator(
-    task_id="bash_task",
-    bash_command='echo "Here is the message: $message"',
-    env={'message': '{{ dag_run.conf["message"] if dag_run else "" }}'},
-    dag=dag,
-)
+    bash_task = BashOperator(
+        task_id="bash_task",
+        bash_command='echo "Here is the message: $message"',
+        env={'message': '{{ dag_run.conf["message"] if dag_run else "" }}'},
+    )
diff --git a/airflow/example_dags/example_xcom.py b/airflow/example_dags/example_xcom.py
index 779e392..03f85d9 100644
--- a/airflow/example_dags/example_xcom.py
+++ b/airflow/example_dags/example_xcom.py
@@ -21,14 +21,6 @@ from airflow import DAG
 from airflow.operators.python import PythonOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(
-    'example_xcom',
-    schedule_interval="@once",
-    start_date=days_ago(2),
-    default_args={'owner': 'airflow'},
-    tags=['example'],
-)
-
 value_1 = [1, 2, 3]
 value_2 = {'a': 'b'}
 
@@ -65,22 +57,27 @@ def puller(**kwargs):
         raise ValueError(f'The two values differ {pulled_value_2} and {value_2}')
 
 
-push1 = PythonOperator(
-    task_id='push',
-    dag=dag,
-    python_callable=push,
-)
+with DAG(
+    'example_xcom',
+    schedule_interval="@once",
+    start_date=days_ago(2),
+    default_args={'owner': 'airflow'},
+    tags=['example'],
+) as dag:
+
+    push1 = PythonOperator(
+        task_id='push',
+        python_callable=push,
+    )
 
-push2 = PythonOperator(
-    task_id='push_by_returning',
-    dag=dag,
-    python_callable=push_by_returning,
-)
+    push2 = PythonOperator(
+        task_id='push_by_returning',
+        python_callable=push_by_returning,
+    )
 
-pull = PythonOperator(
-    task_id='puller',
-    dag=dag,
-    python_callable=puller,
-)
+    pull = PythonOperator(
+        task_id='puller',
+        python_callable=puller,
+    )
 
-pull << [push1, push2]
+    pull << [push1, push2]
diff --git a/airflow/example_dags/test_utils.py b/airflow/example_dags/test_utils.py
index a1a2ed0..0211dfb 100644
--- a/airflow/example_dags/test_utils.py
+++ b/airflow/example_dags/test_utils.py
@@ -20,12 +20,11 @@ from airflow import DAG
 from airflow.operators.bash import BashOperator
 from airflow.utils.dates import days_ago
 
-dag = DAG(dag_id='test_utils', schedule_interval=None, tags=['example'])
+with DAG(dag_id='test_utils', schedule_interval=None, tags=['example']) as dag:
 
-task = BashOperator(
-    task_id='sleeps_forever',
-    dag=dag,
-    bash_command="sleep 10000000000",
-    start_date=days_ago(2),
-    owner='airflow',
-)
+    task = BashOperator(
+        task_id='sleeps_forever',
+        bash_command="sleep 10000000000",
+        start_date=days_ago(2),
+        owner='airflow',
+    )
diff --git a/airflow/example_dags/tutorial.py b/airflow/example_dags/tutorial.py
index a00051c..518c801 100644
--- a/airflow/example_dags/tutorial.py
+++ b/airflow/example_dags/tutorial.py
@@ -24,6 +24,7 @@ Documentation that goes along with the Airflow tutorial located
 # [START tutorial]
 # [START import_module]
 from datetime import timedelta
+from textwrap import dedent
 
 # The DAG object; we'll need this to instantiate a DAG
 from airflow import DAG
@@ -62,62 +63,63 @@ default_args = {
 # [END default_args]
 
 # [START instantiate_dag]
-dag = DAG(
+with DAG(
     'tutorial',
     default_args=default_args,
     description='A simple tutorial DAG',
     schedule_interval=timedelta(days=1),
     start_date=days_ago(2),
     tags=['example'],
-)
-# [END instantiate_dag]
+) as dag:
+    # [END instantiate_dag]
 
-# t1, t2 and t3 are examples of tasks created by instantiating operators
-# [START basic_task]
-t1 = BashOperator(
-    task_id='print_date',
-    bash_command='date',
-    dag=dag,
-)
+    # t1, t2 and t3 are examples of tasks created by instantiating operators
+    # [START basic_task]
+    t1 = BashOperator(
+        task_id='print_date',
+        bash_command='date',
+    )
 
-t2 = BashOperator(
-    task_id='sleep',
-    depends_on_past=False,
-    bash_command='sleep 5',
-    retries=3,
-    dag=dag,
-)
-# [END basic_task]
+    t2 = BashOperator(
+        task_id='sleep',
+        depends_on_past=False,
+        bash_command='sleep 5',
+        retries=3,
+    )
+    # [END basic_task]
 
-# [START documentation]
-dag.doc_md = __doc__
+    # [START documentation]
+    dag.doc_md = __doc__
 
-t1.doc_md = """\
-#### Task Documentation
-You can document your task using the attributes `doc_md` (markdown),
-`doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets
-rendered in the UI's Task Instance Details page.
-![img](http://montcs.bloomu.edu/~bobmon/Semesters/2012-01/491/import%20soul.png)
-"""
-# [END documentation]
+    t1.doc_md = dedent(
+        """\
+    #### Task Documentation
+    You can document your task using the attributes `doc_md` (markdown),
+    `doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which get
+    rendered in the UI's Task Instance Details page.
+    ![img](http://montcs.bloomu.edu/~bobmon/Semesters/2012-01/491/import%20soul.png)
+    """
+    )
+    # [END documentation]
 
-# [START jinja_template]
-templated_command = """
-{% for i in range(5) %}
-    echo "{{ ds }}"
-    echo "{{ macros.ds_add(ds, 7)}}"
-    echo "{{ params.my_param }}"
-{% endfor %}
-"""
+    # [START jinja_template]
+    templated_command = dedent(
+        """
+    {% for i in range(5) %}
+        echo "{{ ds }}"
+        echo "{{ macros.ds_add(ds, 7)}}"
+        echo "{{ params.my_param }}"
+    {% endfor %}
+    """
+    )
 
-t3 = BashOperator(
-    task_id='templated',
-    depends_on_past=False,
-    bash_command=templated_command,
-    params={'my_param': 'Parameter I passed in'},
-    dag=dag,
-)
-# [END jinja_template]
+    t3 = BashOperator(
+        task_id='templated',
+        depends_on_past=False,
+        bash_command=templated_command,
+        params={'my_param': 'Parameter I passed in'},
+    )
+    # [END jinja_template]
 
-t1 >> [t2, t3]
+    t1 >> [t2, t3]
 # [END tutorial]
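
The example-DAG diffs above all apply the same conversion: operators stop taking an explicit dag=dag argument and are instead created inside a with DAG(...) block, which registers them on the DAG automatically. A minimal sketch of the resulting pattern (the dag_id and task_id below are hypothetical, chosen only for illustration):

    from airflow import DAG
    from airflow.operators.bash import BashOperator
    from airflow.utils.dates import days_ago

    # Tasks created inside the context manager are attached to the DAG
    # automatically, so the explicit dag=dag argument becomes unnecessary.
    with DAG(
        dag_id='context_manager_sketch',  # hypothetical dag_id for illustration
        schedule_interval=None,
        start_date=days_ago(2),
        tags=['example'],
    ) as dag:
        hello = BashOperator(
            task_id='hello',  # hypothetical task_id
            bash_command='echo "hello"',
        )

The same shape applies to every example DAG converted in this commit; only the operators and ids differ.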
diff --git a/airflow/example_dags/tutorial_etl_dag.py b/airflow/example_dags/tutorial_etl_dag.py
index 48b519b..8b45600 100644
--- a/airflow/example_dags/tutorial_etl_dag.py
+++ b/airflow/example_dags/tutorial_etl_dag.py
@@ -27,6 +27,7 @@ as part of the documentation that goes along with the Airflow Functional DAG tut
 # [START tutorial]
 # [START import_module]
 import json
+from textwrap import dedent
 
 # The DAG object; we'll need this to instantiate a DAG
 from airflow import DAG
@@ -98,33 +99,39 @@ with DAG(
         task_id='extract',
         python_callable=extract,
     )
-    extract_task.doc_md = """\
-#### Extract task
-A simple Extract task to get data ready for the rest of the data pipeline.
-In this case, getting data is simulated by reading from a hardcoded JSON string.
-This data is then put into xcom, so that it can be processed by the next task.
-"""
+    extract_task.doc_md = dedent(
+        """\
+    #### Extract task
+    A simple Extract task to get data ready for the rest of the data pipeline.
+    In this case, getting data is simulated by reading from a hardcoded JSON string.
+    This data is then put into xcom, so that it can be processed by the next task.
+    """
+    )
 
     transform_task = PythonOperator(
         task_id='transform',
         python_callable=transform,
     )
-    transform_task.doc_md = """\
-#### Transform task
-A simple Transform task which takes in the collection of order data from xcom
-and computes the total order value.
-This computed value is then put into xcom, so that it can be processed by the next task.
-"""
+    transform_task.doc_md = dedent(
+        """\
+    #### Transform task
+    A simple Transform task which takes in the collection of order data from xcom
+    and computes the total order value.
+    This computed value is then put into xcom, so that it can be processed by the next task.
+    """
+    )
 
     load_task = PythonOperator(
         task_id='load',
         python_callable=load,
     )
-    load_task.doc_md = """\
-#### Load task
-A simple Load task which takes in the result of the Transform task, by reading it
-from xcom and instead of saving it to end user review, just prints it out.
-"""
+    load_task.doc_md = dedent(
+        """\
+    #### Load task
+    A simple Load task which takes in the result of the Transform task, by reading it
+    from xcom and, instead of saving it for end user review, just prints it out.
+    """
+    )
 
     extract_task >> transform_task >> load_task
 
diff --git a/docs/apache-airflow/executor/kubernetes.rst b/docs/apache-airflow/executor/kubernetes.rst
index 9b774cf..a0df9db 100644
--- a/docs/apache-airflow/executor/kubernetes.rst
+++ b/docs/apache-airflow/executor/kubernetes.rst
@@ -120,6 +120,7 @@ name ``base`` and a second container containing your desired sidecar.
 
 .. exampleinclude:: /../../airflow/example_dags/example_kubernetes_executor_config.py
     :language: python
+    :dedent: 8
     :start-after: [START task_with_sidecar]
     :end-before: [END task_with_sidecar]
 
@@ -130,6 +131,7 @@ Here is an example of a task with both features:
 
 .. exampleinclude:: /../../airflow/example_dags/example_kubernetes_executor_config.py
     :language: python
+    :dedent: 8
     :start-after: [START task_with_template]
     :end-before: [END task_with_template]
 
diff --git a/docs/apache-airflow/howto/operator/bash.rst b/docs/apache-airflow/howto/operator/bash.rst
index c8a923f..3d2195f 100644
--- a/docs/apache-airflow/howto/operator/bash.rst
+++ b/docs/apache-airflow/howto/operator/bash.rst
@@ -27,6 +27,7 @@ commands in a `Bash <https://www.gnu.org/software/bash/>`__ shell.
 
 .. exampleinclude:: /../../airflow/example_dags/example_bash_operator.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_bash]
     :end-before: [END howto_operator_bash]
 
@@ -38,6 +39,7 @@ You can use :ref:`Jinja templates <jinja-templating>` to parameterize the
 
 .. exampleinclude:: /../../airflow/example_dags/example_bash_operator.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_bash_template]
     :end-before: [END howto_operator_bash_template]
 
diff --git a/docs/apache-airflow/howto/operator/external_task_sensor.rst b/docs/apache-airflow/howto/operator/external_task_sensor.rst
index eec8074..420bd13 100644
--- a/docs/apache-airflow/howto/operator/external_task_sensor.rst
+++ b/docs/apache-airflow/howto/operator/external_task_sensor.rst
@@ -46,6 +46,7 @@ via ``allowed_states`` and ``failed_states`` parameters.
 
 .. exampleinclude:: /../../airflow/example_dags/example_external_task_marker_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_external_task_sensor]
     :end-before: [END howto_operator_external_task_sensor]
 
@@ -60,5 +61,6 @@ user clears ``parent_task``.
 
 .. exampleinclude:: /../../airflow/example_dags/example_external_task_marker_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_external_task_marker]
     :end-before: [END howto_operator_external_task_marker]
diff --git a/docs/apache-airflow/howto/operator/python.rst b/docs/apache-airflow/howto/operator/python.rst
index 7f4d2b8..4a59df6 100644
--- a/docs/apache-airflow/howto/operator/python.rst
+++ b/docs/apache-airflow/howto/operator/python.rst
@@ -27,6 +27,7 @@ Python callables.
 
 .. exampleinclude:: /../../airflow/example_dags/example_python_operator.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_python]
     :end-before: [END howto_operator_python]
 
@@ -38,6 +39,7 @@ to the Python callable.
 
 .. exampleinclude:: /../../airflow/example_dags/example_python_operator.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_python_kwargs]
     :end-before: [END howto_operator_python_kwargs]
 
@@ -63,6 +65,7 @@ Python callables inside a new Python virtual environment.
 
 .. exampleinclude:: /../../airflow/example_dags/example_python_operator.py
     :language: python
+    :dedent: 4
     :start-after: [START howto_operator_python_venv]
     :end-before: [END howto_operator_python_venv]
 
diff --git a/docs/apache-airflow/tutorial.rst b/docs/apache-airflow/tutorial.rst
index 9324014..3a6b7ce9 100644
--- a/docs/apache-airflow/tutorial.rst
+++ b/docs/apache-airflow/tutorial.rst
@@ -109,6 +109,7 @@ instantiated from an operator is called a task. The first argument
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial.py
     :language: python
+    :dedent: 4
     :start-after: [START basic_task]
     :end-before: [END basic_task]
 
@@ -144,6 +145,7 @@ stamp").
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial.py
     :language: python
+    :dedent: 4
     :start-after: [START jinja_template]
     :end-before: [END jinja_template]
 
@@ -186,6 +188,7 @@ json, yaml.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial.py
     :language: python
+    :dedent: 4
     :start-after: [START documentation]
     :end-before: [END documentation]
 
diff --git a/docs/apache-airflow/tutorial_taskflow_api.rst b/docs/apache-airflow/tutorial_taskflow_api.rst
index cea1438..b089e03 100644
--- a/docs/apache-airflow/tutorial_taskflow_api.rst
+++ b/docs/apache-airflow/tutorial_taskflow_api.rst
@@ -69,6 +69,7 @@ as shown below. The function name acts as a unique identifier for the task.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START extract]
     :end-before: [END extract]
 
@@ -83,6 +84,7 @@ we can move to the main part of the DAG.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
 
@@ -119,6 +121,7 @@ in the middle of the data pipeline. In Airflow 1.x, this task is defined as show
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_etl_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START transform_function]
     :end-before: [END transform_function]
 
@@ -130,6 +133,7 @@ Contrasting that with Taskflow API in Airflow 2.0 as shown below.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START transform]
     :end-before: [END transform]
 
@@ -143,6 +147,7 @@ dependencies specified as shown below.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_etl_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
 
@@ -151,6 +156,7 @@ the dependencies as shown below.
 
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
 


[airflow] 30/41: Support google-cloud-monitoring>=2.0.0 (#13769)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit acfe4ae7988476f79cd63f21d3fd8de4336daa42
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Feb 2 07:01:55 2021 +0100

    Support google-cloud-monitoring>=2.0.0 (#13769)
    
    (cherry picked from commit d2efb33239d36e58fb69066fd23779724cb11a90)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   1 +
 .../cloud/example_dags/example_stackdriver.py      |  82 +++++--
 .../providers/google/cloud/hooks/stackdriver.py    | 133 +++++------
 .../google/cloud/operators/stackdriver.py          |  12 +-
 setup.py                                           |   2 +-
 .../google/cloud/hooks/test_stackdriver.py         | 242 +++++++++++----------
 .../google/cloud/operators/test_stackdriver.py     |  49 ++++-
 7 files changed, 302 insertions(+), 219 deletions(-)
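
The hook and test changes that follow adopt the google-cloud-monitoring 2.0 calling convention: client methods take a single request mapping instead of separate keyword arguments, metadata defaults to an empty tuple, and the proto-plus message classes (AlertPolicy, NotificationChannel) replace the google.protobuf.json_format helpers. A hedged sketch of the new style, assuming a hypothetical project id and valid Google Cloud credentials:

    import os

    from google.cloud import monitoring_v3
    from google.cloud.monitoring_v3 import AlertPolicy

    # Hypothetical project id, mirroring the example DAG in this commit.
    project_id = os.environ.get("GCP_PROJECT_ID", "example-project")
    client = monitoring_v3.AlertPolicyServiceClient()

    # 2.x style: arguments move into a single `request` mapping and the
    # proto-plus message exposes to_dict()/to_json() for serialization.
    policies = client.list_alert_policies(request={'name': f'projects/{project_id}'})
    as_dicts = [AlertPolicy.to_dict(policy) for policy in policies]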

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index 16a6683..9cf9853 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -34,6 +34,7 @@ Details are covered in the UPDATING.md files for each library, but there are som
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
 | [``google-cloud-dataproc``](https://pypi.org/project/google-cloud-dataproc/) | ``>=1.0.1,<2.0.0`` | ``>=2.2.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-dataproc/blob/master/UPGRADING.md) |
 | [``google-cloud-kms``](https://pypi.org/project/google-cloud-kms/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
+| [``google-cloud-monitoring``](https://pypi.org/project/google-cloud-monitoring/) | ``>=0.34.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-monitoring/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
 | [``google-cloud-tasks``](https://pypi.org/project/google-cloud-tasks/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-tasks/blob/master/UPGRADING.md) |
diff --git a/airflow/providers/google/cloud/example_dags/example_stackdriver.py b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
index 68ac978..9c418b7 100644
--- a/airflow/providers/google/cloud/example_dags/example_stackdriver.py
+++ b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
@@ -21,6 +21,7 @@ Example Airflow DAG for Google Cloud Stackdriver service.
 """
 
 import json
+import os
 
 from airflow import models
 from airflow.providers.google.cloud.operators.stackdriver import (
@@ -37,56 +38,80 @@ from airflow.providers.google.cloud.operators.stackdriver import (
 )
 from airflow.utils.dates import days_ago
 
+PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
+
 TEST_ALERT_POLICY_1 = {
     "combiner": "OR",
-    "name": "projects/sd-project/alertPolicies/12345",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": True,
-    "displayName": "test alert 1",
+    "display_name": "test alert 1",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
+                "filter": (
+                    'metric.label.state="blocked" AND '
+                    'metric.type="agent.googleapis.com/processes/count_by_state" '
+                    'AND resource.type="gce_instance"'
+                ),
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "threshold_value": 100,
+                "duration": {'seconds': 900},
+                "trigger": {"percent": 0},
+                "aggregations": [
+                    {
+                        "alignment_period": {'seconds': 60},
+                        "per_series_aligner": "ALIGN_MEAN",
+                        "cross_series_reducer": "REDUCE_MEAN",
+                        "group_by_fields": ["project", "resource.label.instance_id", "resource.label.zone"],
+                    }
+                ],
             },
-            "displayName": "Condition display",
-            "name": "projects/sd-project/alertPolicies/123/conditions/456",
+            "display_name": "test_alert_policy_1",
         }
     ],
 }
 
 TEST_ALERT_POLICY_2 = {
     "combiner": "OR",
-    "name": "projects/sd-project/alertPolicies/6789",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": False,
-    "displayName": "test alert 2",
+    "display_name": "test alert 2",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
+                "filter": (
+                    'metric.label.state="blocked" AND '
+                    'metric.type="agent.googleapis.com/processes/count_by_state" AND '
+                    'resource.type="gce_instance"'
+                ),
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "threshold_value": 100,
+                "duration": {'seconds': 900},
+                "trigger": {"percent": 0},
+                "aggregations": [
+                    {
+                        "alignment_period": {'seconds': 60},
+                        "per_series_aligner": "ALIGN_MEAN",
+                        "cross_series_reducer": "REDUCE_MEAN",
+                        "group_by_fields": ["project", "resource.label.instance_id", "resource.label.zone"],
+                    }
+                ],
             },
-            "displayName": "Condition display",
-            "name": "projects/sd-project/alertPolicies/456/conditions/789",
+            "display_name": "test_alert_policy_2",
         }
     ],
 }
 
 TEST_NOTIFICATION_CHANNEL_1 = {
-    "displayName": "channel1",
+    "display_name": "channel1",
     "enabled": True,
     "labels": {"auth_token": "top-secret", "channel_name": "#channel"},
-    "name": "projects/sd-project/notificationChannels/12345",
-    "type": "slack",
+    "type_": "slack",
 }
 
 TEST_NOTIFICATION_CHANNEL_2 = {
-    "displayName": "channel2",
+    "display_name": "channel2",
     "enabled": False,
     "labels": {"auth_token": "top-secret", "channel_name": "#channel"},
-    "name": "projects/sd-project/notificationChannels/6789",
-    "type": "slack",
+    "type_": "slack",
 }
 
 with models.DAG(
@@ -150,18 +175,29 @@ with models.DAG(
     # [START howto_operator_gcp_stackdriver_delete_notification_channel]
     delete_notification_channel = StackdriverDeleteNotificationChannelOperator(
         task_id='delete-notification-channel',
-        name='test-channel',
+        name="{{ task_instance.xcom_pull('list-notification-channel')[0]['name'] }}",
     )
     # [END howto_operator_gcp_stackdriver_delete_notification_channel]
 
+    delete_notification_channel_2 = StackdriverDeleteNotificationChannelOperator(
+        task_id='delete-notification-channel-2',
+        name="{{ task_instance.xcom_pull('list-notification-channel')[1]['name'] }}",
+    )
+
     # [START howto_operator_gcp_stackdriver_delete_alert_policy]
     delete_alert_policy = StackdriverDeleteAlertOperator(
         task_id='delete-alert-policy',
-        name='test-alert',
+        name="{{ task_instance.xcom_pull('list-alert-policies')[0]['name'] }}",
     )
     # [END howto_operator_gcp_stackdriver_delete_alert_policy]
 
+    delete_alert_policy_2 = StackdriverDeleteAlertOperator(
+        task_id='delete-alert-policy-2',
+        name="{{ task_instance.xcom_pull('list-alert-policies')[1]['name'] }}",
+    )
+
     create_notification_channel >> enable_notification_channel >> disable_notification_channel
     disable_notification_channel >> list_notification_channel >> create_alert_policy
     create_alert_policy >> enable_alert_policy >> disable_alert_policy >> list_alert_policies
-    list_alert_policies >> delete_notification_channel >> delete_alert_policy
+    list_alert_policies >> delete_notification_channel >> delete_notification_channel_2
+    delete_notification_channel_2 >> delete_alert_policy >> delete_alert_policy_2
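
The delete operators above no longer reference hard-coded resource names; they render the names at runtime from the XCom values pushed by the upstream list tasks. A short, isolated sketch of that pattern (the task_id is hypothetical, and the operator is assumed to be defined inside a DAG as in the example DAG above):

    from airflow.providers.google.cloud.operators.stackdriver import (
        StackdriverDeleteNotificationChannelOperator,
    )

    # The name is rendered at runtime from the XCom value pushed by the
    # upstream 'list-notification-channel' task; [0] selects the first channel.
    delete_first_channel = StackdriverDeleteNotificationChannelOperator(
        task_id='delete-first-channel',  # hypothetical task_id
        name="{{ task_instance.xcom_pull('list-notification-channel')[0]['name'] }}",
    )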
diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/airflow/providers/google/cloud/hooks/stackdriver.py
index 9da1afa..04dc329 100644
--- a/airflow/providers/google/cloud/hooks/stackdriver.py
+++ b/airflow/providers/google/cloud/hooks/stackdriver.py
@@ -24,7 +24,8 @@ from typing import Any, Optional, Sequence, Union
 from google.api_core.exceptions import InvalidArgument
 from google.api_core.gapic_v1.method import DEFAULT
 from google.cloud import monitoring_v3
-from google.protobuf.json_format import MessageToDict, MessageToJson, Parse
+from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel
+from google.protobuf.field_mask_pb2 import FieldMask
 from googleapiclient.errors import HttpError
 
 from airflow.exceptions import AirflowException
@@ -110,18 +111,20 @@ class StackdriverHook(GoogleBaseHook):
         """
         client = self._get_policy_client()
         policies_ = client.list_alert_policies(
-            name=f'projects/{project_id}',
-            filter_=filter_,
-            order_by=order_by,
-            page_size=page_size,
+            request={
+                'name': f'projects/{project_id}',
+                'filter': filter_,
+                'order_by': order_by,
+                'page_size': page_size,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         if format_ == "dict":
-            return [MessageToDict(policy) for policy in policies_]
+            return [AlertPolicy.to_dict(policy) for policy in policies_]
         elif format_ == "json":
-            return [MessageToJson(policy) for policy in policies_]
+            return [AlertPolicy.to_json(policy) for policy in policies_]
         else:
             return policies_
 
@@ -138,12 +141,14 @@ class StackdriverHook(GoogleBaseHook):
         client = self._get_policy_client()
         policies_ = self.list_alert_policies(project_id=project_id, filter_=filter_)
         for policy in policies_:
-            if policy.enabled.value != bool(new_state):
-                policy.enabled.value = bool(new_state)
-                mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-                mask.paths.append('enabled')  # pylint: disable=no-member
+            if policy.enabled != bool(new_state):
+                policy.enabled = bool(new_state)
+                mask = FieldMask(paths=['enabled'])
                 client.update_alert_policy(
-                    alert_policy=policy, update_mask=mask, retry=retry, timeout=timeout, metadata=metadata
+                    request={'alert_policy': policy, 'update_mask': mask},
+                    retry=retry,
+                    timeout=timeout,
+                    metadata=metadata or (),
                 )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -265,40 +270,39 @@ class StackdriverHook(GoogleBaseHook):
         ]
         policies_ = []
         channels = []
-
-        for channel in record["channels"]:
-            channel_json = json.dumps(channel)
-            channels.append(Parse(channel_json, monitoring_v3.types.notification_pb2.NotificationChannel()))
-        for policy in record["policies"]:
-            policy_json = json.dumps(policy)
-            policies_.append(Parse(policy_json, monitoring_v3.types.alert_pb2.AlertPolicy()))
+        for channel in record.get("channels", []):
+            channels.append(NotificationChannel(**channel))
+        for policy in record.get("policies", []):
+            policies_.append(AlertPolicy(**policy))
 
         channel_name_map = {}
 
         for channel in channels:
             channel.verification_status = (
-                monitoring_v3.enums.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
+                monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
             )
 
             if channel.name in existing_channels:
                 channel_client.update_notification_channel(
-                    notification_channel=channel, retry=retry, timeout=timeout, metadata=metadata
+                    request={'notification_channel': channel},
+                    retry=retry,
+                    timeout=timeout,
+                    metadata=metadata or (),
                 )
             else:
                 old_name = channel.name
-                channel.ClearField('name')
+                channel.name = None
                 new_channel = channel_client.create_notification_channel(
-                    name=f'projects/{project_id}',
-                    notification_channel=channel,
+                    request={'name': f'projects/{project_id}', 'notification_channel': channel},
                     retry=retry,
                     timeout=timeout,
-                    metadata=metadata,
+                    metadata=metadata or (),
                 )
                 channel_name_map[old_name] = new_channel.name
 
         for policy in policies_:
-            policy.ClearField('creation_record')
-            policy.ClearField('mutation_record')
+            policy.creation_record = None
+            policy.mutation_record = None
 
             for i, channel in enumerate(policy.notification_channels):
                 new_channel = channel_name_map.get(channel)
@@ -308,20 +312,22 @@ class StackdriverHook(GoogleBaseHook):
             if policy.name in existing_policies:
                 try:
                     policy_client.update_alert_policy(
-                        alert_policy=policy, retry=retry, timeout=timeout, metadata=metadata
+                        request={'alert_policy': policy},
+                        retry=retry,
+                        timeout=timeout,
+                        metadata=metadata or (),
                     )
                 except InvalidArgument:
                     pass
             else:
-                policy.ClearField('name')
+                policy.name = None
                 for condition in policy.conditions:
-                    condition.ClearField('name')
+                    condition.name = None
                 policy_client.create_alert_policy(
-                    name=f'projects/{project_id}',
-                    alert_policy=policy,
+                    request={'name': f'projects/{project_id}', 'alert_policy': policy},
                     retry=retry,
                     timeout=timeout,
-                    metadata=None,
+                    metadata=metadata or (),
                 )
 
     def delete_alert_policy(
@@ -349,7 +355,9 @@ class StackdriverHook(GoogleBaseHook):
         """
         policy_client = self._get_policy_client()
         try:
-            policy_client.delete_alert_policy(name=name, retry=retry, timeout=timeout, metadata=metadata)
+            policy_client.delete_alert_policy(
+                request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+            )
         except HttpError as err:
             raise AirflowException(f'Delete alerting policy failed. Error was {err.content}')
 
@@ -405,18 +413,20 @@ class StackdriverHook(GoogleBaseHook):
         """
         client = self._get_channel_client()
         channels = client.list_notification_channels(
-            name=f'projects/{project_id}',
-            filter_=filter_,
-            order_by=order_by,
-            page_size=page_size,
+            request={
+                'name': f'projects/{project_id}',
+                'filter': filter_,
+                'order_by': order_by,
+                'page_size': page_size,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         if format_ == "dict":
-            return [MessageToDict(channel) for channel in channels]
+            return [NotificationChannel.to_dict(channel) for channel in channels]
         elif format_ == "json":
-            return [MessageToJson(channel) for channel in channels]
+            return [NotificationChannel.to_json(channel) for channel in channels]
         else:
             return channels
 
@@ -431,18 +441,18 @@ class StackdriverHook(GoogleBaseHook):
         metadata: Optional[str] = None,
     ) -> None:
         client = self._get_channel_client()
-        channels = client.list_notification_channels(name=f'projects/{project_id}', filter_=filter_)
+        channels = client.list_notification_channels(
+            request={'name': f'projects/{project_id}', 'filter': filter_}
+        )
         for channel in channels:
-            if channel.enabled.value != bool(new_state):
-                channel.enabled.value = bool(new_state)
-                mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-                mask.paths.append('enabled')  # pylint: disable=no-member
+            if channel.enabled != bool(new_state):
+                channel.enabled = bool(new_state)
+                mask = FieldMask(paths=['enabled'])
                 client.update_notification_channel(
-                    notification_channel=channel,
-                    update_mask=mask,
+                    request={'notification_channel': channel, 'update_mask': mask},
                     retry=retry,
                     timeout=timeout,
-                    metadata=metadata,
+                    metadata=metadata or (),
                 )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -518,7 +528,7 @@ class StackdriverHook(GoogleBaseHook):
             new_state=False,
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -562,29 +572,28 @@ class StackdriverHook(GoogleBaseHook):
         channel_name_map = {}
 
         for channel in record["channels"]:
-            channel_json = json.dumps(channel)
-            channels_list.append(
-                Parse(channel_json, monitoring_v3.types.notification_pb2.NotificationChannel())
-            )
+            channels_list.append(NotificationChannel(**channel))
 
         for channel in channels_list:
             channel.verification_status = (
-                monitoring_v3.enums.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
+                monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
             )
 
             if channel.name in existing_channels:
                 channel_client.update_notification_channel(
-                    notification_channel=channel, retry=retry, timeout=timeout, metadata=metadata
+                    request={'notification_channel': channel},
+                    retry=retry,
+                    timeout=timeout,
+                    metadata=metadata or (),
                 )
             else:
                 old_name = channel.name
-                channel.ClearField('name')
+                channel.name = None
                 new_channel = channel_client.create_notification_channel(
-                    name=f'projects/{project_id}',
-                    notification_channel=channel,
+                    request={'name': f'projects/{project_id}', 'notification_channel': channel},
                     retry=retry,
                     timeout=timeout,
-                    metadata=metadata,
+                    metadata=metadata or (),
                 )
                 channel_name_map[old_name] = new_channel.name
 
@@ -616,7 +625,7 @@ class StackdriverHook(GoogleBaseHook):
         channel_client = self._get_channel_client()
         try:
             channel_client.delete_notification_channel(
-                name=name, retry=retry, timeout=timeout, metadata=metadata
+                request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
             )
         except HttpError as err:
             raise AirflowException(f'Delete notification channel failed. Error was {err.content}')
diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/airflow/providers/google/cloud/operators/stackdriver.py
index dc86466..7289b12 100644
--- a/airflow/providers/google/cloud/operators/stackdriver.py
+++ b/airflow/providers/google/cloud/operators/stackdriver.py
@@ -19,6 +19,7 @@
 from typing import Optional, Sequence, Union
 
 from google.api_core.gapic_v1.method import DEFAULT
+from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.stackdriver import StackdriverHook
@@ -125,7 +126,7 @@ class StackdriverListAlertPoliciesOperator(BaseOperator):
 
     def execute(self, context):
         self.log.info(
-            'List Alert Policies: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %d',
+            'List Alert Policies: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s',
             self.project_id,
             self.format_,
             self.filter_,
@@ -139,7 +140,7 @@ class StackdriverListAlertPoliciesOperator(BaseOperator):
                 impersonation_chain=self.impersonation_chain,
             )
 
-        return self.hook.list_alert_policies(
+        result = self.hook.list_alert_policies(
             project_id=self.project_id,
             format_=self.format_,
             filter_=self.filter_,
@@ -149,6 +150,7 @@ class StackdriverListAlertPoliciesOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
+        return [AlertPolicy.to_dict(policy) for policy in result]
 
 
 class StackdriverEnableAlertPoliciesOperator(BaseOperator):
@@ -614,7 +616,7 @@ class StackdriverListNotificationChannelsOperator(BaseOperator):
 
     def execute(self, context):
         self.log.info(
-            'List Notification Channels: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %d',
+            'List Notification Channels: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s',
             self.project_id,
             self.format_,
             self.filter_,
@@ -627,7 +629,7 @@ class StackdriverListNotificationChannelsOperator(BaseOperator):
                 delegate_to=self.delegate_to,
                 impersonation_chain=self.impersonation_chain,
             )
-        return self.hook.list_notification_channels(
+        channels = self.hook.list_notification_channels(
             format_=self.format_,
             project_id=self.project_id,
             filter_=self.filter_,
@@ -637,6 +639,8 @@ class StackdriverListNotificationChannelsOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
+        result = [NotificationChannel.to_dict(channel) for channel in channels]
+        return result
 
 
 class StackdriverEnableNotificationChannelsOperator(BaseOperator):
diff --git a/setup.py b/setup.py
index 0f40d88..fa1e73a 100644
--- a/setup.py
+++ b/setup.py
@@ -294,7 +294,7 @@ google = [
     'google-cloud-language>=1.1.1,<2.0.0',
     'google-cloud-logging>=1.14.0,<2.0.0',
     'google-cloud-memcache>=0.2.0',
-    'google-cloud-monitoring>=0.34.0,<2.0.0',
+    'google-cloud-monitoring>=2.0.0,<3.0.0',
     'google-cloud-os-login>=2.0.0,<3.0.0',
     'google-cloud-pubsub>=2.0.0,<3.0.0',
     'google-cloud-redis>=2.0.0,<3.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/tests/providers/google/cloud/hooks/test_stackdriver.py
index 6892d05..10a3097 100644
--- a/tests/providers/google/cloud/hooks/test_stackdriver.py
+++ b/tests/providers/google/cloud/hooks/test_stackdriver.py
@@ -21,8 +21,8 @@ import unittest
 from unittest import mock
 
 from google.api_core.gapic_v1.method import DEFAULT
-from google.cloud import monitoring_v3
-from google.protobuf.json_format import ParseDict
+from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.providers.google.cloud.hooks import stackdriver
 
@@ -32,16 +32,15 @@ TEST_FILTER = "filter"
 TEST_ALERT_POLICY_1 = {
     "combiner": "OR",
     "name": "projects/sd-project/alertPolicies/12345",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": True,
-    "displayName": "test display",
+    "display_name": "test display",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}],
             },
-            "displayName": "Condition display",
+            "display_name": "Condition display",
             "name": "projects/sd-project/alertPolicies/123/conditions/456",
         }
     ],
@@ -50,35 +49,34 @@ TEST_ALERT_POLICY_1 = {
 TEST_ALERT_POLICY_2 = {
     "combiner": "OR",
     "name": "projects/sd-project/alertPolicies/6789",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": False,
-    "displayName": "test display",
+    "display_name": "test display",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}],
             },
-            "displayName": "Condition display",
+            "display_name": "Condition display",
             "name": "projects/sd-project/alertPolicies/456/conditions/789",
         }
     ],
 }
 
 TEST_NOTIFICATION_CHANNEL_1 = {
-    "displayName": "sd",
+    "display_name": "sd",
     "enabled": True,
     "labels": {"auth_token": "top-secret", "channel_name": "#channel"},
     "name": "projects/sd-project/notificationChannels/12345",
-    "type": "slack",
+    "type_": "slack",
 }
 
 TEST_NOTIFICATION_CHANNEL_2 = {
-    "displayName": "sd",
+    "display_name": "sd",
     "enabled": False,
     "labels": {"auth_token": "top-secret", "channel_name": "#channel"},
     "name": "projects/sd-project/notificationChannels/6789",
-    "type": "slack",
+    "type_": "slack",
 }
 
 
@@ -96,13 +94,10 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         method.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=TEST_FILTER,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            order_by=None,
-            page_size=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -113,8 +108,8 @@ class TestStackdriverHookMethods(unittest.TestCase):
     def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_creds_and_project_id):
         hook = stackdriver.StackdriverHook()
 
-        alert_policy_enabled = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy())
-        alert_policy_disabled = ParseDict(TEST_ALERT_POLICY_2, monitoring_v3.types.alert_pb2.AlertPolicy())
+        alert_policy_enabled = AlertPolicy(**TEST_ALERT_POLICY_1)
+        alert_policy_disabled = AlertPolicy(**TEST_ALERT_POLICY_2)
 
         alert_policies = [alert_policy_enabled, alert_policy_disabled]
 
@@ -124,23 +119,18 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         mock_policy_client.return_value.list_alert_policies.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=TEST_FILTER,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            order_by=None,
-            page_size=None,
-            metadata=None,
+            metadata=(),
         )
-        mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-        alert_policy_disabled.enabled.value = True  # pylint: disable=no-member
-        mask.paths.append('enabled')  # pylint: disable=no-member
+        mask = FieldMask(paths=["enabled"])
+        alert_policy_disabled.enabled = True  # pylint: disable=no-member
         mock_policy_client.return_value.update_alert_policy.assert_called_once_with(
-            alert_policy=alert_policy_disabled,
-            update_mask=mask,
+            request=dict(alert_policy=alert_policy_disabled, update_mask=mask),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -150,8 +140,8 @@ class TestStackdriverHookMethods(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_policy_client')
     def test_stackdriver_disable_alert_policy(self, mock_policy_client, mock_get_creds_and_project_id):
         hook = stackdriver.StackdriverHook()
-        alert_policy_enabled = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy())
-        alert_policy_disabled = ParseDict(TEST_ALERT_POLICY_2, monitoring_v3.types.alert_pb2.AlertPolicy())
+        alert_policy_enabled = AlertPolicy(**TEST_ALERT_POLICY_1)
+        alert_policy_disabled = AlertPolicy(**TEST_ALERT_POLICY_2)
 
         mock_policy_client.return_value.list_alert_policies.return_value = [
             alert_policy_enabled,
@@ -162,23 +152,18 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         mock_policy_client.return_value.list_alert_policies.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=TEST_FILTER,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            order_by=None,
-            page_size=None,
-            metadata=None,
+            metadata=(),
         )
-        mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-        alert_policy_enabled.enabled.value = False  # pylint: disable=no-member
-        mask.paths.append('enabled')  # pylint: disable=no-member
+        mask = FieldMask(paths=["enabled"])
+        alert_policy_enabled.enabled = False  # pylint: disable=no-member
         mock_policy_client.return_value.update_alert_policy.assert_called_once_with(
-            alert_policy=alert_policy_enabled,
-            update_mask=mask,
+            request=dict(alert_policy=alert_policy_enabled, update_mask=mask),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -191,8 +176,8 @@ class TestStackdriverHookMethods(unittest.TestCase):
         self, mock_channel_client, mock_policy_client, mock_get_creds_and_project_id
     ):
         hook = stackdriver.StackdriverHook()
-        existing_alert_policy = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy())
-        alert_policy_to_create = ParseDict(TEST_ALERT_POLICY_2, monitoring_v3.types.alert_pb2.AlertPolicy())
+        existing_alert_policy = AlertPolicy(**TEST_ALERT_POLICY_1)
+        alert_policy_to_create = AlertPolicy(**TEST_ALERT_POLICY_2)
 
         mock_policy_client.return_value.list_alert_policies.return_value = [existing_alert_policy]
         mock_channel_client.return_value.list_notification_channels.return_value = []
@@ -202,38 +187,77 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         mock_channel_client.return_value.list_notification_channels.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=None,
+            request=dict(
+                name=f'projects/{PROJECT_ID}',
+                filter=None,
+                order_by=None,
+                page_size=None,
+            ),
             retry=DEFAULT,
             timeout=DEFAULT,
-            order_by=None,
-            page_size=None,
-            metadata=None,
+            metadata=(),
         )
         mock_policy_client.return_value.list_alert_policies.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=None,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            order_by=None,
-            page_size=None,
-            metadata=None,
+            metadata=(),
         )
-        alert_policy_to_create.ClearField('name')
-        alert_policy_to_create.ClearField('creation_record')
-        alert_policy_to_create.ClearField('mutation_record')
-        alert_policy_to_create.conditions[0].ClearField('name')  # pylint: disable=no-member
+        alert_policy_to_create.name = None
+        alert_policy_to_create.creation_record = None
+        alert_policy_to_create.mutation_record = None
+        alert_policy_to_create.conditions[0].name = None
         mock_policy_client.return_value.create_alert_policy.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            alert_policy=alert_policy_to_create,
+            request=dict(
+                name=f'projects/{PROJECT_ID}',
+                alert_policy=alert_policy_to_create,
+            ),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
-        existing_alert_policy.ClearField('creation_record')
-        existing_alert_policy.ClearField('mutation_record')
+        existing_alert_policy.creation_record = None
+        existing_alert_policy.mutation_record = None
         mock_policy_client.return_value.update_alert_policy.assert_called_once_with(
-            alert_policy=existing_alert_policy, retry=DEFAULT, timeout=DEFAULT, metadata=None
+            request=dict(alert_policy=existing_alert_policy), retry=DEFAULT, timeout=DEFAULT, metadata=()
+        )
+
+    @mock.patch(
+        'airflow.providers.google.common.hooks.base_google.GoogleBaseHook._get_credentials_and_project_id',
+        return_value=(CREDENTIALS, PROJECT_ID),
+    )
+    @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_policy_client')
+    @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_channel_client')
+    def test_stackdriver_upsert_alert_policy_without_channel(
+        self, mock_channel_client, mock_policy_client, mock_get_creds_and_project_id
+    ):
+        hook = stackdriver.StackdriverHook()
+        existing_alert_policy = AlertPolicy(**TEST_ALERT_POLICY_1)
+
+        mock_policy_client.return_value.list_alert_policies.return_value = [existing_alert_policy]
+        mock_channel_client.return_value.list_notification_channels.return_value = []
+
+        hook.upsert_alert(
+            alerts=json.dumps({"policies": [TEST_ALERT_POLICY_1, TEST_ALERT_POLICY_2]}),
+            project_id=PROJECT_ID,
+        )
+        mock_channel_client.return_value.list_notification_channels.assert_called_once_with(
+            request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None),
+            metadata=(),
+            retry=DEFAULT,
+            timeout=DEFAULT,
+        )
+        mock_policy_client.return_value.list_alert_policies.assert_called_once_with(
+            request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None),
+            retry=DEFAULT,
+            timeout=DEFAULT,
+            metadata=(),
+        )
+
+        existing_alert_policy.creation_record = None
+        existing_alert_policy.mutation_record = None
+        mock_policy_client.return_value.update_alert_policy.assert_called_once_with(
+            request=dict(alert_policy=existing_alert_policy), retry=DEFAULT, timeout=DEFAULT, metadata=()
         )
 
     @mock.patch(
@@ -247,10 +271,10 @@ class TestStackdriverHookMethods(unittest.TestCase):
             name='test-alert',
         )
         mock_policy_client.return_value.delete_alert_policy.assert_called_once_with(
-            name='test-alert',
+            request=dict(name='test-alert'),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -265,13 +289,10 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         mock_channel_client.return_value.list_notification_channels.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=TEST_FILTER,
-            order_by=None,
-            page_size=None,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -283,12 +304,9 @@ class TestStackdriverHookMethods(unittest.TestCase):
         self, mock_channel_client, mock_get_creds_and_project_id
     ):
         hook = stackdriver.StackdriverHook()
-        notification_channel_enabled = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
-        notification_channel_disabled = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
+        notification_channel_enabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1)
+        notification_channel_disabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2)
+
         mock_channel_client.return_value.list_notification_channels.return_value = [
             notification_channel_enabled,
             notification_channel_disabled,
@@ -299,15 +317,13 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
 
-        notification_channel_disabled.enabled.value = True  # pylint: disable=no-member
-        mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-        mask.paths.append('enabled')  # pylint: disable=no-member
+        notification_channel_disabled.enabled = True  # pylint: disable=no-member
+        mask = FieldMask(paths=['enabled'])
         mock_channel_client.return_value.update_notification_channel.assert_called_once_with(
-            notification_channel=notification_channel_disabled,
-            update_mask=mask,
+            request=dict(notification_channel=notification_channel_disabled, update_mask=mask),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -319,12 +335,8 @@ class TestStackdriverHookMethods(unittest.TestCase):
         self, mock_channel_client, mock_get_creds_and_project_id
     ):
         hook = stackdriver.StackdriverHook()
-        notification_channel_enabled = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
-        notification_channel_disabled = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
+        notification_channel_enabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1)
+        notification_channel_disabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2)
         mock_channel_client.return_value.list_notification_channels.return_value = [
             notification_channel_enabled,
             notification_channel_disabled,
@@ -335,15 +347,13 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
 
-        notification_channel_enabled.enabled.value = False  # pylint: disable=no-member
-        mask = monitoring_v3.types.field_mask_pb2.FieldMask()
-        mask.paths.append('enabled')  # pylint: disable=no-member
+        notification_channel_enabled.enabled = False  # pylint: disable=no-member
+        mask = FieldMask(paths=['enabled'])
         mock_channel_client.return_value.update_notification_channel.assert_called_once_with(
-            notification_channel=notification_channel_enabled,
-            update_mask=mask,
+            request=dict(notification_channel=notification_channel_enabled, update_mask=mask),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -353,12 +363,9 @@ class TestStackdriverHookMethods(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_channel_client')
     def test_stackdriver_upsert_channel(self, mock_channel_client, mock_get_creds_and_project_id):
         hook = stackdriver.StackdriverHook()
-        existing_notification_channel = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
-        notification_channel_to_be_created = ParseDict(
-            TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel()
-        )
+        existing_notification_channel = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1)
+        notification_channel_to_be_created = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2)
+
         mock_channel_client.return_value.list_notification_channels.return_value = [
             existing_notification_channel
         ]
@@ -367,24 +374,25 @@ class TestStackdriverHookMethods(unittest.TestCase):
             project_id=PROJECT_ID,
         )
         mock_channel_client.return_value.list_notification_channels.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            filter_=None,
-            order_by=None,
-            page_size=None,
+            request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
         mock_channel_client.return_value.update_notification_channel.assert_called_once_with(
-            notification_channel=existing_notification_channel, retry=DEFAULT, timeout=DEFAULT, metadata=None
+            request=dict(notification_channel=existing_notification_channel),
+            retry=DEFAULT,
+            timeout=DEFAULT,
+            metadata=(),
         )
-        notification_channel_to_be_created.ClearField('name')
+        notification_channel_to_be_created.name = None
         mock_channel_client.return_value.create_notification_channel.assert_called_once_with(
-            name=f'projects/{PROJECT_ID}',
-            notification_channel=notification_channel_to_be_created,
+            request=dict(
+                name=f'projects/{PROJECT_ID}', notification_channel=notification_channel_to_be_created
+            ),
             retry=DEFAULT,
             timeout=DEFAULT,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -400,5 +408,5 @@ class TestStackdriverHookMethods(unittest.TestCase):
             name='test-channel',
         )
         mock_channel_client.return_value.delete_notification_channel.assert_called_once_with(
-            name='test-channel', retry=DEFAULT, timeout=DEFAULT, metadata=None
+            request=dict(name='test-channel'), retry=DEFAULT, timeout=DEFAULT, metadata=()
         )
diff --git a/tests/providers/google/cloud/operators/test_stackdriver.py b/tests/providers/google/cloud/operators/test_stackdriver.py
index 28901b4..50dd997 100644
--- a/tests/providers/google/cloud/operators/test_stackdriver.py
+++ b/tests/providers/google/cloud/operators/test_stackdriver.py
@@ -21,6 +21,7 @@ import unittest
 from unittest import mock
 
 from google.api_core.gapic_v1.method import DEFAULT
+from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel
 
 from airflow.providers.google.cloud.operators.stackdriver import (
     StackdriverDeleteAlertOperator,
@@ -40,16 +41,15 @@ TEST_FILTER = 'filter'
 TEST_ALERT_POLICY_1 = {
     "combiner": "OR",
     "name": "projects/sd-project/alertPolicies/12345",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": True,
-    "displayName": "test display",
+    "display_name": "test display",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}],
             },
-            "displayName": "Condition display",
+            "display_name": "Condition display",
             "name": "projects/sd-project/alertPolicies/123/conditions/456",
         }
     ],
@@ -58,16 +58,15 @@ TEST_ALERT_POLICY_1 = {
 TEST_ALERT_POLICY_2 = {
     "combiner": "OR",
     "name": "projects/sd-project/alertPolicies/6789",
-    "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"},
     "enabled": False,
-    "displayName": "test display",
+    "display_name": "test display",
     "conditions": [
         {
-            "conditionThreshold": {
+            "condition_threshold": {
                 "comparison": "COMPARISON_GT",
-                "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}],
+                "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}],
             },
-            "displayName": "Condition display",
+            "display_name": "Condition display",
             "name": "projects/sd-project/alertPolicies/456/conditions/789",
         }
     ],
@@ -94,7 +93,8 @@ class TestStackdriverListAlertPoliciesOperator(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.operators.stackdriver.StackdriverHook')
     def test_execute(self, mock_hook):
         operator = StackdriverListAlertPoliciesOperator(task_id=TEST_TASK_ID, filter_=TEST_FILTER)
-        operator.execute(None)
+        mock_hook.return_value.list_alert_policies.return_value = [AlertPolicy(name="test-name")]
+        result = operator.execute(None)
         mock_hook.return_value.list_alert_policies.assert_called_once_with(
             project_id=None,
             filter_=TEST_FILTER,
@@ -105,6 +105,16 @@ class TestStackdriverListAlertPoliciesOperator(unittest.TestCase):
             timeout=DEFAULT,
             metadata=None,
         )
+        assert [
+            {
+                'combiner': 0,
+                'conditions': [],
+                'display_name': '',
+                'name': 'test-name',
+                'notification_channels': [],
+                'user_labels': {},
+            }
+        ] == result
 
 
 class TestStackdriverEnableAlertPoliciesOperator(unittest.TestCase):
@@ -160,7 +170,11 @@ class TestStackdriverListNotificationChannelsOperator(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.operators.stackdriver.StackdriverHook')
     def test_execute(self, mock_hook):
         operator = StackdriverListNotificationChannelsOperator(task_id=TEST_TASK_ID, filter_=TEST_FILTER)
-        operator.execute(None)
+        mock_hook.return_value.list_notification_channels.return_value = [
+            NotificationChannel(name="test-123")
+        ]
+
+        result = operator.execute(None)
         mock_hook.return_value.list_notification_channels.assert_called_once_with(
             project_id=None,
             filter_=TEST_FILTER,
@@ -171,6 +185,17 @@ class TestStackdriverListNotificationChannelsOperator(unittest.TestCase):
             timeout=DEFAULT,
             metadata=None,
         )
+        assert [
+            {
+                'description': '',
+                'display_name': '',
+                'labels': {},
+                'name': 'test-123',
+                'type_': '',
+                'user_labels': {},
+                'verification_status': 0,
+            }
+        ] == result
 
 
 class TestStackdriverEnableNotificationChannelsOperator(unittest.TestCase):
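
For readers migrating their own code, the call style exercised by the updated hook tests above can be
sketched roughly as follows (a minimal, hypothetical example for google-cloud-monitoring >= 2.0.0; the
project name and channel id are placeholders and the snippet assumes valid default credentials):

    from google.cloud.monitoring_v3 import AlertPolicyServiceClient, NotificationChannel

    client = AlertPolicyServiceClient()
    # 2.x clients take a single `request` mapping instead of separate keyword
    # arguments; optional fields such as filter/page_size go into the same mapping,
    # and empty metadata is an empty tuple rather than None.
    policies = client.list_alert_policies(
        request=dict(name="projects/my-project"),
        metadata=(),
    )
    # proto-plus message classes are built directly from dicts, replacing ParseDict.
    channel = NotificationChannel(
        name="projects/my-project/notificationChannels/123", enabled=True
    )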


[airflow] 39/41: fixup! Switch to f-strings using flynt. (#13732)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b05faa4c8ad979375d93c472cdb72628801a0ede
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Mar 3 06:06:41 2021 +0100

    fixup! Switch to f-strings using flynt. (#13732)
---
 .../providers/snowflake/transfers/s3_to_snowflake.py   | 18 +++++++-----------
 tests/providers/google/cloud/operators/test_pubsub.py  |  2 +-
 tests/providers/google/cloud/sensors/test_pubsub.py    |  2 +-
 .../snowflake/transfers/test_s3_to_snowflake.py        | 18 ++++++------------
 4 files changed, 15 insertions(+), 25 deletions(-)

diff --git a/airflow/providers/snowflake/transfers/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py
index 2238718..27f8810 100644
--- a/airflow/providers/snowflake/transfers/s3_to_snowflake.py
+++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py
@@ -82,13 +82,11 @@ class S3ToSnowflakeOperator(BaseOperator):
         files = files.replace(']', ')')
 
         # we can extend this based on stage
-        base_sql = """
-                    FROM @{stage}/
+        base_sql = f"""
+                    FROM @{self.stage}/
                     files={files}
-                    file_format={file_format}
-                """.format(
-            stage=self.stage, files=files, file_format=self.file_format
-        )
+                    file_format={self.file_format}
+                """
 
         if self.columns_array:
             copy_query = """
@@ -97,11 +95,9 @@ class S3ToSnowflakeOperator(BaseOperator):
                 schema=self.schema, table=self.table, columns=",".join(self.columns_array), base_sql=base_sql
             )
         else:
-            copy_query = """
-                COPY INTO {schema}.{table} {base_sql}
-            """.format(
-                schema=self.schema, table=self.table, base_sql=base_sql
-            )
+            copy_query = f"""
+                COPY INTO {self.schema}.{self.table} {base_sql}
+            """
 
         self.log.info('Executing COPY command...')
         snowflake_hook.run(copy_query, self.autocommit)
diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/tests/providers/google/cloud/operators/test_pubsub.py
index 6abfffa..02f356d 100644
--- a/tests/providers/google/cloud/operators/test_pubsub.py
+++ b/tests/providers/google/cloud/operators/test_pubsub.py
@@ -230,7 +230,7 @@ class TestPubSubPullOperator(unittest.TestCase):
     def _generate_messages(self, count):
         return [
             ReceivedMessage(
-                ack_id="%s" % i,
+                ack_id=f"{i}",
                 message={
                     "data": f'Message {i}'.encode('utf8'),
                     "attributes": {"type": "generated message"},
diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/tests/providers/google/cloud/sensors/test_pubsub.py
index 795860b..6a502aa 100644
--- a/tests/providers/google/cloud/sensors/test_pubsub.py
+++ b/tests/providers/google/cloud/sensors/test_pubsub.py
@@ -35,7 +35,7 @@ class TestPubSubPullSensor(unittest.TestCase):
     def _generate_messages(self, count):
         return [
             ReceivedMessage(
-                ack_id="%s" % i,
+                ack_id=f"{i}",
                 message={
                     "data": f'Message {i}'.encode('utf8'),
                     "attributes": {"type": "generated message"},
diff --git a/tests/providers/snowflake/transfers/test_s3_to_snowflake.py b/tests/providers/snowflake/transfers/test_s3_to_snowflake.py
index 02e6e5a..f965acf 100644
--- a/tests/providers/snowflake/transfers/test_s3_to_snowflake.py
+++ b/tests/providers/snowflake/transfers/test_s3_to_snowflake.py
@@ -46,19 +46,15 @@ class TestS3ToSnowflakeTransfer(unittest.TestCase):
         files = str(s3_keys)
         files = files.replace('[', '(')
         files = files.replace(']', ')')
-        base_sql = """
+        base_sql = f"""
                 FROM @{stage}/
                 files={files}
                 file_format={file_format}
-            """.format(
-            stage=stage, files=files, file_format=file_format
-        )
+            """
 
-        copy_query = """
+        copy_query = f"""
                 COPY INTO {schema}.{table} {base_sql}
-            """.format(
-            schema=schema, table=table, base_sql=base_sql
-        )
+            """
 
         assert mock_run.call_count == 1
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], copy_query)
@@ -86,13 +82,11 @@ class TestS3ToSnowflakeTransfer(unittest.TestCase):
         files = str(s3_keys)
         files = files.replace('[', '(')
         files = files.replace(']', ')')
-        base_sql = """
+        base_sql = f"""
                 FROM @{stage}/
                 files={files}
                 file_format={file_format}
-            """.format(
-            stage=stage, files=files, file_format=file_format
-        )
+            """
 
         copy_query = """
                 COPY INTO {schema}.{table}({columns}) {base_sql}
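
The flynt rewrite above is mechanical; as a quick, self-contained illustration (with made-up
placeholder values, not the operator's real inputs), the two forms produce identical SQL text:

    stage, files, file_format = "my_stage", "('a.csv', 'b.csv')", "(type = 'CSV')"

    # before: str.format() with explicit keyword arguments
    old = """
            FROM @{stage}/
            files={files}
            file_format={file_format}
        """.format(stage=stage, files=files, file_format=file_format)

    # after: the equivalent f-string emitted by flynt
    new = f"""
            FROM @{stage}/
            files={files}
            file_format={file_format}
        """

    assert old == new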


[airflow] 36/41: Pin moto to <2 (#14433)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 80d2644e471feef728d235f83a1a723b10f80d57
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Feb 24 22:22:54 2021 +0000

    Pin moto to <2 (#14433)
    
    https://pypi.org/project/moto/#history -- moto 2.0.0 was released yesterday and is causing CI failures
    (cherry picked from commit 802159767baf1768d92c6047c2fdb2094ee7a2a8)
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index ad4fdd5..2867b36 100644
--- a/setup.py
+++ b/setup.py
@@ -491,7 +491,7 @@ devel = [
     # See: https://github.com/spulec/moto/issues/3535
     'mock<4.0.3',
     'mongomock',
-    'moto',
+    'moto<2',
     'mypy==0.770',
     'parameterized',
     'paramiko',


[airflow] 18/41: Update compatibility with google-cloud-os-login>=2.0.0 (#13126)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e2eceb5bc8ea8c6094b3fdc5057564d284661811
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Dec 17 11:00:59 2020 +0100

    Update compatibility with google-cloud-os-login>=2.0.0 (#13126)
    
    (cherry picked from commit 1259c712a42d69135dc389de88f79942c70079a3)
---
 airflow/providers/google/cloud/hooks/os_login.py   | 16 +++++++++-------
 setup.py                                           |  2 +-
 .../providers/google/cloud/hooks/test_os_login.py  | 22 ++++++++++++----------
 3 files changed, 22 insertions(+), 18 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/os_login.py b/airflow/providers/google/cloud/hooks/os_login.py
index c7a4234..361ea60 100644
--- a/airflow/providers/google/cloud/hooks/os_login.py
+++ b/airflow/providers/google/cloud/hooks/os_login.py
@@ -17,7 +17,7 @@
 
 from typing import Dict, Optional, Sequence, Union
 
-from google.cloud.oslogin_v1 import OsLoginServiceClient
+from google.cloud.oslogin_v1 import ImportSshPublicKeyResponse, OsLoginServiceClient
 
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 
@@ -54,7 +54,7 @@ class OSLoginHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def import_ssh_public_key(
         self, user: str, ssh_public_key: Dict, project_id: str, retry=None, timeout=None, metadata=None
-    ):
+    ) -> ImportSshPublicKeyResponse:
         """
         Adds an SSH public key and returns the profile information. Default POSIX
         account information is set when no username and UID exist as part of the
@@ -74,14 +74,16 @@ class OSLoginHook(GoogleBaseHook):
         :type timeout: Optional[float]
         :param metadata: Additional metadata that is provided to the method.
         :type metadata: Optional[Sequence[Tuple[str, str]]]
-        :return:  A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance.
+        :return: A :class:`~google.cloud.oslogin_v1.ImportSshPublicKeyResponse` instance.
         """
         conn = self.get_conn()
         return conn.import_ssh_public_key(
-            parent=OsLoginServiceClient.user_path(user=user),
-            ssh_public_key=ssh_public_key,
-            project_id=project_id,
+            request=dict(
+                parent=f"users/{user}",
+                ssh_public_key=ssh_public_key,
+                project_id=project_id,
+            ),
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
diff --git a/setup.py b/setup.py
index 7071795..0586bf3 100644
--- a/setup.py
+++ b/setup.py
@@ -295,7 +295,7 @@ google = [
     'google-cloud-logging>=1.14.0,<2.0.0',
     'google-cloud-memcache>=0.2.0',
     'google-cloud-monitoring>=0.34.0,<2.0.0',
-    'google-cloud-os-login>=1.0.0,<2.0.0',
+    'google-cloud-os-login>=2.0.0,<3.0.0',
     'google-cloud-pubsub>=1.0.0,<2.0.0',
     'google-cloud-redis>=0.3.0,<2.0.0',
     'google-cloud-secret-manager>=0.2.0,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_os_login.py b/tests/providers/google/cloud/hooks/test_os_login.py
index 303f1ea..d2b88e4 100644
--- a/tests/providers/google/cloud/hooks/test_os_login.py
+++ b/tests/providers/google/cloud/hooks/test_os_login.py
@@ -38,7 +38,7 @@ TEST_CREDENTIALS = mock.MagicMock()
 TEST_BODY: Dict = mock.MagicMock()
 TEST_RETRY: Retry = mock.MagicMock()
 TEST_TIMEOUT: float = 4
-TEST_METADATA: Sequence[Tuple[str, str]] = []
+TEST_METADATA: Sequence[Tuple[str, str]] = ()
 TEST_PARENT: str = "users/test-user"
 
 
@@ -67,9 +67,11 @@ class TestOSLoginHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with(
-            parent=TEST_PARENT,
-            ssh_public_key=TEST_BODY,
-            project_id=TEST_PROJECT_ID,
+            request=dict(
+                parent=TEST_PARENT,
+                ssh_public_key=TEST_BODY,
+                project_id=TEST_PROJECT_ID,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -101,9 +103,11 @@ class TestOSLoginHookWithDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with(
-            parent=TEST_PARENT,
-            ssh_public_key=TEST_BODY,
-            project_id=TEST_PROJECT_ID_2,
+            request=dict(
+                parent=TEST_PARENT,
+                ssh_public_key=TEST_BODY,
+                project_id=TEST_PROJECT_ID_2,
+            ),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
@@ -135,9 +139,7 @@ class TestOSLoginHookWithoutDefaultProjectIdHook(TestCase):
             metadata=TEST_METADATA,
         )
         mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with(
-            parent=TEST_PARENT,
-            ssh_public_key=TEST_BODY,
-            project_id=TEST_PROJECT_ID,
+            request=dict(parent=TEST_PARENT, ssh_public_key=TEST_BODY, project_id=TEST_PROJECT_ID),
             retry=TEST_RETRY,
             timeout=TEST_TIMEOUT,
             metadata=TEST_METADATA,
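
As a rough guide to the 2.x API shape this commit targets, a hypothetical direct client call would
look like the following (placeholder user, key, and project; not the hook's exact code, and it
assumes valid default credentials):

    from google.cloud.oslogin_v1 import OsLoginServiceClient

    client = OsLoginServiceClient()
    # google-cloud-os-login >= 2.0.0 wraps the arguments in a single `request`
    # mapping; metadata defaults to an empty tuple instead of None.
    response = client.import_ssh_public_key(
        request=dict(
            parent="users/test-user",
            ssh_public_key={"key": "ssh-rsa AAAA... user@example.com"},
            project_id="my-project",
        ),
        metadata=(),
    )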


[airflow] 41/41: fixup! Add Neo4j hook and operator (#13324)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0d78fe173b9ac385bf42fd004f4c77a4e0f9467c
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Mar 3 10:21:57 2021 +0100

    fixup! Add Neo4j hook and operator (#13324)
---
 docs/apache-airflow-providers-neo4j/commits.rst | 10 ++--
 docs/apache-airflow-providers-neo4j/index.rst   | 76 +++++++++++++++++++++++++
 2 files changed, 81 insertions(+), 5 deletions(-)

diff --git a/docs/apache-airflow-providers-neo4j/commits.rst b/docs/apache-airflow-providers-neo4j/commits.rst
index 76dc03e..bfe9721 100644
--- a/docs/apache-airflow-providers-neo4j/commits.rst
+++ b/docs/apache-airflow-providers-neo4j/commits.rst
@@ -31,11 +31,11 @@ For high-level changelog, see :doc:`package information including changelog <ind
 1.0.0
 .....
 
-Latest change: 2021-01-31
+Latest change: 2021-02-01
 
-================================================================================================  ===========  ========================================
+================================================================================================  ===========  ================================================
 Commit                                                                                            Committed    Subject
-================================================================================================  ===========  ========================================
-`4a9ce091b <https://github.com/apache/airflow/commit/4a9ce091b11b901e4f73d36457de29d5a2154159>`_  2021-01-31   ``Implement provider versioning tools``
+================================================================================================  ===========  ================================================
+`ac2f72c98 <https://github.com/apache/airflow/commit/ac2f72c98dc0821b33721054588adbf2bb53bb0b>`_  2021-02-01   ``Implement provider versioning tools (#13767)``
 `1d2977f6a <https://github.com/apache/airflow/commit/1d2977f6a4c67fa6174c79dcdc4e9ee3ce06f1b1>`_  2021-01-14   ``Add Neo4j hook and operator (#13324)``
-================================================================================================  ===========  ========================================
+================================================================================================  ===========  ================================================
diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst
index cafc57b..dd995fb 100644
--- a/docs/apache-airflow-providers-neo4j/index.rst
+++ b/docs/apache-airflow-providers-neo4j/index.rst
@@ -46,3 +46,79 @@ Content
     :caption: Resources
 
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-neo4j/>
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
+
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Commits
+
+    Detailed list of commits <commits>
+
+
+Package apache-airflow-providers-neo4j
+------------------------------------------------------
+
+`Neo4j <https://neo4j.com/>`__
+
+
+Release: 1.0.0
+
+Provider package
+----------------
+
+This is a provider package for the ``neo4j`` provider. All classes for this provider package
+are in the ``airflow.providers.neo4j`` Python package.
+
+Installation
+------------
+
+.. note::
+
+    In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
+    does not yet work with Apache Airflow and might lead to errors in installation - depending on your choice
+    of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
+    ``pip install --upgrade pip==20.2.4`` or, in case you use pip 20.3, add the option
+    ``--use-deprecated legacy-resolver`` to your pip install command.
+
+
+You can install this package on top of an existing Airflow 2.* installation via
+``pip install apache-airflow-providers-neo4j``
+
+PIP requirements
+----------------
+
+=============  ==================
+PIP package    Version required
+=============  ==================
+``neo4j``      ``>=4.2.1``
+=============  ==================
+
+
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Changelog
+---------
+
+
+1.0.0
+.....
+
+Initial version of the provider.
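
Since the generated index does not yet include a usage snippet, a minimal, hypothetical DAG using
the new operator could look like this (connection id, Cypher query, and parameter names are
assumptions based on the 1.0.0 provider and should be checked against the package docs):

    from airflow import DAG
    from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
    from airflow.utils.dates import days_ago

    # placeholder DAG id and connection id; schedule_interval=None means manual runs only
    with DAG(dag_id="example_neo4j", start_date=days_ago(1), schedule_interval=None) as dag:
        run_query = Neo4jOperator(
            task_id="run_cypher",
            neo4j_conn_id="neo4j_default",
            sql="MATCH (n) RETURN count(n)",
        )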


[airflow] 13/41: Minor doc fixes (#14547)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 35375f57da73b6a6459b781052e55b657aa75d80
Author: Xiaodong DENG <xd...@apache.org>
AuthorDate: Mon Mar 1 21:31:58 2021 +0100

    Minor doc fixes (#14547)
    
    (cherry picked from commit 391baee4047127fe722eeb7f4aec219c86a89295)
---
 docs/apache-airflow/installation.rst | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/apache-airflow/installation.rst b/docs/apache-airflow/installation.rst
index f8042ac..eac6894 100644
--- a/docs/apache-airflow/installation.rst
+++ b/docs/apache-airflow/installation.rst
@@ -63,7 +63,7 @@ issues from ``pip`` 20.3.0 release have been fixed in 20.3.3). In order to insta
 either downgrade pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option
 ``--use-deprecated legacy-resolver`` to your pip install command.
 
-While they are some successes with using other tools like `poetry <https://python-poetry.org/>`_ or
+While there are some successes with using other tools like `poetry <https://python-poetry.org/>`_ or
 `pip-tools <https://pypi.org/project/pip-tools/>`_, they do not share the same workflow as
 ``pip`` - especially when it comes to constraint vs. requirements management.
 Installing via ``Poetry`` or ``pip-tools`` is not currently supported. If you wish to install airflow
@@ -81,8 +81,8 @@ environment. For instance, if you don't need connectivity with Postgres,
 you won't have to go through the trouble of installing the ``postgres-devel``
 yum package, or whatever equivalent applies on the distribution you are using.
 
-Most of the extra dependencies are linked to a corresponding providers package. For example "amazon" extra
-has a corresponding ``apache-airflow-providers-amazon`` providers package to be installed. When you install
+Most of the extra dependencies are linked to a corresponding provider package. For example, the "amazon" extra
+has a corresponding ``apache-airflow-providers-amazon`` provider package to be installed. When you install
 Airflow with such extras, the necessary provider packages are installed automatically (latest versions from
 PyPI for those packages). However you can freely upgrade and install provider packages independently from
 the main Airflow installation.
@@ -96,7 +96,7 @@ Provider packages
 
 Unlike Apache Airflow 1.10, the Airflow 2.0 is delivered in multiple, separate, but connected packages.
 The core of Airflow scheduling system is delivered as ``apache-airflow`` package and there are around
-60 providers packages which can be installed separately as so called ``Airflow Provider packages``.
+60 provider packages which can be installed separately as so-called ``Airflow Provider packages``.
 The default Airflow installation doesn't have many integrations and you have to install them yourself.
 
 You can even develop and install your own providers for Airflow. For more information,
@@ -164,9 +164,9 @@ In order to have repeatable installation, starting from **Airflow 1.10.10** and
 ``constraints-master``, ``constraints-2-0`` and ``constraints-1-10`` orphan branches and then we create a tag
 for each released version, e.g. ``constraints-2.0.1``. This way, we keep a tested and working set of dependencies.
 
-Those "known-to-be-working" constraints are per major/minor python version. You can use them as constraint
+Those "known-to-be-working" constraints are per major/minor Python version. You can use them as constraint
 files when installing Airflow from PyPI. Note that you have to specify correct Airflow version
-and python versions in the URL.
+and Python versions in the URL.
 
 You can create the URL to the file substituting the variables in the template below.
 


[airflow] 21/41: Support google-cloud-pubsub>=2.0.0 (#13127)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 8faa1bdb02422bd62eb730da7d653164050a7dd9
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Tue Dec 22 13:02:59 2020 +0100

    Support google-cloud-pubsub>=2.0.0 (#13127)
    
    (cherry picked from commit 8c00ec89b97aa6e725379d08c8ff29a01be47e73)
---
 airflow/providers/google/cloud/hooks/pubsub.py     |  81 ++++----
 airflow/providers/google/cloud/operators/pubsub.py |   3 +-
 airflow/providers/google/cloud/sensors/pubsub.py   |   3 +-
 setup.py                                           |   2 +-
 tests/providers/google/cloud/hooks/test_pubsub.py  | 221 +++++++++++----------
 .../google/cloud/operators/test_pubsub.py          |  16 +-
 .../providers/google/cloud/sensors/test_pubsub.py  |  16 +-
 7 files changed, 177 insertions(+), 165 deletions(-)

diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py
index f2ae190..37240a2 100644
--- a/airflow/providers/google/cloud/hooks/pubsub.py
+++ b/airflow/providers/google/cloud/hooks/pubsub.py
@@ -111,7 +111,7 @@ class PubSubHook(GoogleBaseHook):
         self._validate_messages(messages)
 
         publisher = self.get_conn()
-        topic_path = PublisherClient.topic_path(project_id, topic)  # pylint: disable=no-member
+        topic_path = f"projects/{project_id}/topics/{topic}"
 
         self.log.info("Publish %d messages to topic (path) %s", len(messages), topic_path)
         try:
@@ -206,7 +206,7 @@ class PubSubHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]]
         """
         publisher = self.get_conn()
-        topic_path = PublisherClient.topic_path(project_id, topic)  # pylint: disable=no-member
+        topic_path = f"projects/{project_id}/topics/{topic}"
 
         # Add airflow-version label to the topic
         labels = labels or {}
@@ -216,13 +216,15 @@ class PubSubHook(GoogleBaseHook):
         try:
             # pylint: disable=no-member
             publisher.create_topic(
-                name=topic_path,
-                labels=labels,
-                message_storage_policy=message_storage_policy,
-                kms_key_name=kms_key_name,
+                request={
+                    "name": topic_path,
+                    "labels": labels,
+                    "message_storage_policy": message_storage_policy,
+                    "kms_key_name": kms_key_name,
+                },
                 retry=retry,
                 timeout=timeout,
-                metadata=metadata,
+                metadata=metadata or (),
             )
         except AlreadyExists:
             self.log.warning('Topic already exists: %s', topic)
@@ -266,16 +268,13 @@ class PubSubHook(GoogleBaseHook):
         :type metadata: Sequence[Tuple[str, str]]]
         """
         publisher = self.get_conn()
-        topic_path = PublisherClient.topic_path(project_id, topic)  # pylint: disable=no-member
+        topic_path = f"projects/{project_id}/topics/{topic}"
 
         self.log.info("Deleting topic (path) %s", topic_path)
         try:
             # pylint: disable=no-member
             publisher.delete_topic(
-                topic=topic_path,
-                retry=retry,
-                timeout=timeout,
-                metadata=metadata,
+                request={"topic": topic_path}, retry=retry, timeout=timeout, metadata=metadata or ()
             )
         except NotFound:
             self.log.warning('Topic does not exist: %s', topic_path)
@@ -401,27 +400,29 @@ class PubSubHook(GoogleBaseHook):
         labels['airflow-version'] = 'v' + version.replace('.', '-').replace('+', '-')
 
         # pylint: disable=no-member
-        subscription_path = SubscriberClient.subscription_path(subscription_project_id, subscription)
-        topic_path = SubscriberClient.topic_path(project_id, topic)
+        subscription_path = f"projects/{subscription_project_id}/subscriptions/{subscription}"
+        topic_path = f"projects/{project_id}/topics/{topic}"
 
         self.log.info("Creating subscription (path) %s for topic (path) %a", subscription_path, topic_path)
         try:
             subscriber.create_subscription(
-                name=subscription_path,
-                topic=topic_path,
-                push_config=push_config,
-                ack_deadline_seconds=ack_deadline_secs,
-                retain_acked_messages=retain_acked_messages,
-                message_retention_duration=message_retention_duration,
-                labels=labels,
-                enable_message_ordering=enable_message_ordering,
-                expiration_policy=expiration_policy,
-                filter_=filter_,
-                dead_letter_policy=dead_letter_policy,
-                retry_policy=retry_policy,
+                request={
+                    "name": subscription_path,
+                    "topic": topic_path,
+                    "push_config": push_config,
+                    "ack_deadline_seconds": ack_deadline_secs,
+                    "retain_acked_messages": retain_acked_messages,
+                    "message_retention_duration": message_retention_duration,
+                    "labels": labels,
+                    "enable_message_ordering": enable_message_ordering,
+                    "expiration_policy": expiration_policy,
+                    "filter": filter_,
+                    "dead_letter_policy": dead_letter_policy,
+                    "retry_policy": retry_policy,
+                },
                 retry=retry,
                 timeout=timeout,
-                metadata=metadata,
+                metadata=metadata or (),
             )
         except AlreadyExists:
             self.log.warning('Subscription already exists: %s', subscription_path)
@@ -466,13 +467,16 @@ class PubSubHook(GoogleBaseHook):
         """
         subscriber = self.subscriber_client
         # noqa E501 # pylint: disable=no-member
-        subscription_path = SubscriberClient.subscription_path(project_id, subscription)
+        subscription_path = f"projects/{project_id}/subscriptions/{subscription}"
 
         self.log.info("Deleting subscription (path) %s", subscription_path)
         try:
             # pylint: disable=no-member
             subscriber.delete_subscription(
-                subscription=subscription_path, retry=retry, timeout=timeout, metadata=metadata
+                request={"subscription": subscription_path},
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata or (),
             )
 
         except NotFound:
@@ -527,18 +531,20 @@ class PubSubHook(GoogleBaseHook):
         """
         subscriber = self.subscriber_client
         # noqa E501 # pylint: disable=no-member,line-too-long
-        subscription_path = SubscriberClient.subscription_path(project_id, subscription)
+        subscription_path = f"projects/{project_id}/subscriptions/{subscription}"
 
         self.log.info("Pulling max %d messages from subscription (path) %s", max_messages, subscription_path)
         try:
             # pylint: disable=no-member
             response = subscriber.pull(
-                subscription=subscription_path,
-                max_messages=max_messages,
-                return_immediately=return_immediately,
+                request={
+                    "subscription": subscription_path,
+                    "max_messages": max_messages,
+                    "return_immediately": return_immediately,
+                },
                 retry=retry,
                 timeout=timeout,
-                metadata=metadata,
+                metadata=metadata or (),
             )
             result = getattr(response, 'received_messages', [])
             self.log.info("Pulled %d messages from subscription (path) %s", len(result), subscription_path)
@@ -591,17 +597,16 @@ class PubSubHook(GoogleBaseHook):
 
         subscriber = self.subscriber_client
         # noqa E501 # pylint: disable=no-member
-        subscription_path = SubscriberClient.subscription_path(project_id, subscription)
+        subscription_path = f"projects/{project_id}/subscriptions/{subscription}"
 
         self.log.info("Acknowledging %d ack_ids from subscription (path) %s", len(ack_ids), subscription_path)
         try:
             # pylint: disable=no-member
             subscriber.acknowledge(
-                subscription=subscription_path,
-                ack_ids=ack_ids,
+                request={"subscription": subscription_path, "ack_ids": ack_ids},
                 retry=retry,
                 timeout=timeout,
-                metadata=metadata,
+                metadata=metadata or (),
             )
         except (HttpError, GoogleAPICallError) as e:
             raise PubSubException(
diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py
index e8cf735..23b545f 100644
--- a/airflow/providers/google/cloud/operators/pubsub.py
+++ b/airflow/providers/google/cloud/operators/pubsub.py
@@ -29,7 +29,6 @@ from google.cloud.pubsub_v1.types import (
     ReceivedMessage,
     RetryPolicy,
 )
-from google.protobuf.json_format import MessageToDict
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.pubsub import PubSubHook
@@ -958,6 +957,6 @@ class PubSubPullOperator(BaseOperator):
         :param context: same as in `execute`
         :return: value to be saved to XCom.
         """
-        messages_json = [MessageToDict(m) for m in pulled_messages]
+        messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages]
 
         return messages_json
diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py
index d6e0be5..ff1f811 100644
--- a/airflow/providers/google/cloud/sensors/pubsub.py
+++ b/airflow/providers/google/cloud/sensors/pubsub.py
@@ -20,7 +20,6 @@ import warnings
 from typing import Any, Callable, Dict, List, Optional, Sequence, Union
 
 from google.cloud.pubsub_v1.types import ReceivedMessage
-from google.protobuf.json_format import MessageToDict
 
 from airflow.providers.google.cloud.hooks.pubsub import PubSubHook
 from airflow.sensors.base import BaseSensorOperator
@@ -200,6 +199,6 @@ class PubSubPullSensor(BaseSensorOperator):
         :param context: same as in `execute`
         :return: value to be saved to XCom.
         """
-        messages_json = [MessageToDict(m) for m in pulled_messages]
+        messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages]
 
         return messages_json
diff --git a/setup.py b/setup.py
index 1ec4f5d..ff9fd71 100644
--- a/setup.py
+++ b/setup.py
@@ -296,7 +296,7 @@ google = [
     'google-cloud-memcache>=0.2.0',
     'google-cloud-monitoring>=0.34.0,<2.0.0',
     'google-cloud-os-login>=2.0.0,<3.0.0',
-    'google-cloud-pubsub>=1.0.0,<2.0.0',
+    'google-cloud-pubsub>=2.0.0,<3.0.0',
     'google-cloud-redis>=0.3.0,<2.0.0',
     'google-cloud-secret-manager>=0.2.0,<2.0.0',
     'google-cloud-spanner>=1.10.0,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py
index 4086526..628d619 100644
--- a/tests/providers/google/cloud/hooks/test_pubsub.py
+++ b/tests/providers/google/cloud/hooks/test_pubsub.py
@@ -25,7 +25,6 @@ import pytest
 from google.api_core.exceptions import AlreadyExists, GoogleAPICallError
 from google.cloud.exceptions import NotFound
 from google.cloud.pubsub_v1.types import ReceivedMessage
-from google.protobuf.json_format import ParseDict
 from googleapiclient.errors import HttpError
 from parameterized import parameterized
 
@@ -67,15 +66,12 @@ class TestPubSubHook(unittest.TestCase):
 
     def _generate_messages(self, count) -> List[ReceivedMessage]:
         return [
-            ParseDict(
-                {
-                    "ack_id": str(i),
-                    "message": {
-                        "data": f'Message {i}'.encode('utf8'),
-                        "attributes": {"type": "generated message"},
-                    },
+            ReceivedMessage(
+                ack_id=str(i),
+                message={
+                    "data": f'Message {i}'.encode('utf8'),
+                    "attributes": {"type": "generated message"},
                 },
-                ReceivedMessage(),
             )
             for i in range(1, count + 1)
         ]
@@ -112,20 +108,19 @@ class TestPubSubHook(unittest.TestCase):
         create_method = mock_service.return_value.create_topic
         self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC)
         create_method.assert_called_once_with(
-            name=EXPANDED_TOPIC,
-            labels=LABELS,
-            message_storage_policy=None,
-            kms_key_name=None,
+            request=dict(name=EXPANDED_TOPIC, labels=LABELS, message_storage_policy=None, kms_key_name=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_delete_topic(self, mock_service):
         delete_method = mock_service.return_value.delete_topic
         self.pubsub_hook.delete_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC)
-        delete_method.assert_called_once_with(topic=EXPANDED_TOPIC, retry=None, timeout=None, metadata=None)
+        delete_method.assert_called_once_with(
+            request=dict(topic=EXPANDED_TOPIC), retry=None, timeout=None, metadata=()
+        )
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_delete_nonexisting_topic_failifnotexists(self, mock_service):
@@ -177,21 +172,23 @@ class TestPubSubHook(unittest.TestCase):
             project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION
         )
         create_method.assert_called_once_with(
-            name=EXPANDED_SUBSCRIPTION,
-            topic=EXPANDED_TOPIC,
-            push_config=None,
-            ack_deadline_seconds=10,
-            retain_acked_messages=None,
-            message_retention_duration=None,
-            labels=LABELS,
-            enable_message_ordering=False,
-            expiration_policy=None,
-            filter_=None,
-            dead_letter_policy=None,
-            retry_policy=None,
+            request=dict(
+                name=EXPANDED_SUBSCRIPTION,
+                topic=EXPANDED_TOPIC,
+                push_config=None,
+                ack_deadline_seconds=10,
+                retain_acked_messages=None,
+                message_retention_duration=None,
+                labels=LABELS,
+                enable_message_ordering=False,
+                expiration_policy=None,
+                filter=None,
+                dead_letter_policy=None,
+                retry_policy=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert TEST_SUBSCRIPTION == response
 
@@ -208,21 +205,23 @@ class TestPubSubHook(unittest.TestCase):
             'a-different-project', TEST_SUBSCRIPTION
         )
         create_method.assert_called_once_with(
-            name=expected_subscription,
-            topic=EXPANDED_TOPIC,
-            push_config=None,
-            ack_deadline_seconds=10,
-            retain_acked_messages=None,
-            message_retention_duration=None,
-            labels=LABELS,
-            enable_message_ordering=False,
-            expiration_policy=None,
-            filter_=None,
-            dead_letter_policy=None,
-            retry_policy=None,
+            request=dict(
+                name=expected_subscription,
+                topic=EXPANDED_TOPIC,
+                push_config=None,
+                ack_deadline_seconds=10,
+                retain_acked_messages=None,
+                message_retention_duration=None,
+                labels=LABELS,
+                enable_message_ordering=False,
+                expiration_policy=None,
+                filter=None,
+                dead_letter_policy=None,
+                retry_policy=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
         assert TEST_SUBSCRIPTION == response
@@ -232,7 +231,7 @@ class TestPubSubHook(unittest.TestCase):
         self.pubsub_hook.delete_subscription(project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION)
         delete_method = mock_service.delete_subscription
         delete_method.assert_called_once_with(
-            subscription=EXPANDED_SUBSCRIPTION, retry=None, timeout=None, metadata=None
+            request=dict(subscription=EXPANDED_SUBSCRIPTION), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
@@ -266,21 +265,23 @@ class TestPubSubHook(unittest.TestCase):
 
         response = self.pubsub_hook.create_subscription(project_id=TEST_PROJECT, topic=TEST_TOPIC)
         create_method.assert_called_once_with(
-            name=expected_name,
-            topic=EXPANDED_TOPIC,
-            push_config=None,
-            ack_deadline_seconds=10,
-            retain_acked_messages=None,
-            message_retention_duration=None,
-            labels=LABELS,
-            enable_message_ordering=False,
-            expiration_policy=None,
-            filter_=None,
-            dead_letter_policy=None,
-            retry_policy=None,
+            request=dict(
+                name=expected_name,
+                topic=EXPANDED_TOPIC,
+                push_config=None,
+                ack_deadline_seconds=10,
+                retain_acked_messages=None,
+                message_retention_duration=None,
+                labels=LABELS,
+                enable_message_ordering=False,
+                expiration_policy=None,
+                filter=None,
+                dead_letter_policy=None,
+                retry_policy=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert f'sub-{TEST_UUID}' == response
 
@@ -292,21 +293,23 @@ class TestPubSubHook(unittest.TestCase):
             project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION, ack_deadline_secs=30
         )
         create_method.assert_called_once_with(
-            name=EXPANDED_SUBSCRIPTION,
-            topic=EXPANDED_TOPIC,
-            push_config=None,
-            ack_deadline_seconds=30,
-            retain_acked_messages=None,
-            message_retention_duration=None,
-            labels=LABELS,
-            enable_message_ordering=False,
-            expiration_policy=None,
-            filter_=None,
-            dead_letter_policy=None,
-            retry_policy=None,
+            request=dict(
+                name=EXPANDED_SUBSCRIPTION,
+                topic=EXPANDED_TOPIC,
+                push_config=None,
+                ack_deadline_seconds=30,
+                retain_acked_messages=None,
+                message_retention_duration=None,
+                labels=LABELS,
+                enable_message_ordering=False,
+                expiration_policy=None,
+                filter=None,
+                dead_letter_policy=None,
+                retry_policy=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert TEST_SUBSCRIPTION == response
 
@@ -321,21 +324,23 @@ class TestPubSubHook(unittest.TestCase):
             filter_='attributes.domain="com"',
         )
         create_method.assert_called_once_with(
-            name=EXPANDED_SUBSCRIPTION,
-            topic=EXPANDED_TOPIC,
-            push_config=None,
-            ack_deadline_seconds=10,
-            retain_acked_messages=None,
-            message_retention_duration=None,
-            labels=LABELS,
-            enable_message_ordering=False,
-            expiration_policy=None,
-            filter_='attributes.domain="com"',
-            dead_letter_policy=None,
-            retry_policy=None,
+            request=dict(
+                name=EXPANDED_SUBSCRIPTION,
+                topic=EXPANDED_TOPIC,
+                push_config=None,
+                ack_deadline_seconds=10,
+                retain_acked_messages=None,
+                message_retention_duration=None,
+                labels=LABELS,
+                enable_message_ordering=False,
+                expiration_policy=None,
+                filter='attributes.domain="com"',
+                dead_letter_policy=None,
+                retry_policy=None,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert TEST_SUBSCRIPTION == response
 
@@ -401,12 +406,14 @@ class TestPubSubHook(unittest.TestCase):
             project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10
         )
         pull_method.assert_called_once_with(
-            subscription=EXPANDED_SUBSCRIPTION,
-            max_messages=10,
-            return_immediately=False,
+            request=dict(
+                subscription=EXPANDED_SUBSCRIPTION,
+                max_messages=10,
+                return_immediately=False,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert pulled_messages == response
 
@@ -419,12 +426,14 @@ class TestPubSubHook(unittest.TestCase):
             project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10
         )
         pull_method.assert_called_once_with(
-            subscription=EXPANDED_SUBSCRIPTION,
-            max_messages=10,
-            return_immediately=False,
+            request=dict(
+                subscription=EXPANDED_SUBSCRIPTION,
+                max_messages=10,
+                return_immediately=False,
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
         assert [] == response
 
@@ -445,12 +454,14 @@ class TestPubSubHook(unittest.TestCase):
         with pytest.raises(PubSubException):
             self.pubsub_hook.pull(project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10)
             pull_method.assert_called_once_with(
-                subscription=EXPANDED_SUBSCRIPTION,
-                max_messages=10,
-                return_immediately=False,
+                request=dict(
+                    subscription=EXPANDED_SUBSCRIPTION,
+                    max_messages=10,
+                    return_immediately=False,
+                ),
                 retry=None,
                 timeout=None,
-                metadata=None,
+                metadata=(),
             )
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
@@ -461,11 +472,13 @@ class TestPubSubHook(unittest.TestCase):
             project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, ack_ids=['1', '2', '3']
         )
         ack_method.assert_called_once_with(
-            subscription=EXPANDED_SUBSCRIPTION,
-            ack_ids=['1', '2', '3'],
+            request=dict(
+                subscription=EXPANDED_SUBSCRIPTION,
+                ack_ids=['1', '2', '3'],
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
@@ -478,11 +491,13 @@ class TestPubSubHook(unittest.TestCase):
             messages=self._generate_messages(3),
         )
         ack_method.assert_called_once_with(
-            subscription=EXPANDED_SUBSCRIPTION,
-            ack_ids=['1', '2', '3'],
+            request=dict(
+                subscription=EXPANDED_SUBSCRIPTION,
+                ack_ids=['1', '2', '3'],
+            ),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @parameterized.expand(
@@ -504,11 +519,13 @@ class TestPubSubHook(unittest.TestCase):
                 project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, ack_ids=['1', '2', '3']
             )
             ack_method.assert_called_once_with(
-                subscription=EXPANDED_SUBSCRIPTION,
-                ack_ids=['1', '2', '3'],
+                request=dict(
+                    subscription=EXPANDED_SUBSCRIPTION,
+                    ack_ids=['1', '2', '3'],
+                ),
                 retry=None,
                 timeout=None,
-                metadata=None,
+                metadata=(),
             )
 
     @parameterized.expand(
diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/tests/providers/google/cloud/operators/test_pubsub.py
index 9ff71e6..6abfffa 100644
--- a/tests/providers/google/cloud/operators/test_pubsub.py
+++ b/tests/providers/google/cloud/operators/test_pubsub.py
@@ -21,7 +21,6 @@ from typing import Any, Dict, List
 from unittest import mock
 
 from google.cloud.pubsub_v1.types import ReceivedMessage
-from google.protobuf.json_format import MessageToDict, ParseDict
 
 from airflow.providers.google.cloud.operators.pubsub import (
     PubSubCreateSubscriptionOperator,
@@ -230,21 +229,18 @@ class TestPubSubPublishOperator(unittest.TestCase):
 class TestPubSubPullOperator(unittest.TestCase):
     def _generate_messages(self, count):
         return [
-            ParseDict(
-                {
-                    "ack_id": "%s" % i,
-                    "message": {
-                        "data": f'Message {i}'.encode('utf8'),
-                        "attributes": {"type": "generated message"},
-                    },
+            ReceivedMessage(
+                ack_id="%s" % i,
+                message={
+                    "data": f'Message {i}'.encode('utf8'),
+                    "attributes": {"type": "generated message"},
                 },
-                ReceivedMessage(),
             )
             for i in range(1, count + 1)
         ]
 
     def _generate_dicts(self, count):
-        return [MessageToDict(m) for m in self._generate_messages(count)]
+        return [ReceivedMessage.to_dict(m) for m in self._generate_messages(count)]
 
     @mock.patch('airflow.providers.google.cloud.operators.pubsub.PubSubHook')
     def test_execute_no_messages(self, mock_hook):
diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/tests/providers/google/cloud/sensors/test_pubsub.py
index ba1aee9..795860b 100644
--- a/tests/providers/google/cloud/sensors/test_pubsub.py
+++ b/tests/providers/google/cloud/sensors/test_pubsub.py
@@ -22,7 +22,6 @@ from unittest import mock
 
 import pytest
 from google.cloud.pubsub_v1.types import ReceivedMessage
-from google.protobuf.json_format import MessageToDict, ParseDict
 
 from airflow.exceptions import AirflowSensorTimeout
 from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor
@@ -35,21 +34,18 @@ TEST_SUBSCRIPTION = 'test-subscription'
 class TestPubSubPullSensor(unittest.TestCase):
     def _generate_messages(self, count):
         return [
-            ParseDict(
-                {
-                    "ack_id": "%s" % i,
-                    "message": {
-                        "data": f'Message {i}'.encode('utf8'),
-                        "attributes": {"type": "generated message"},
-                    },
+            ReceivedMessage(
+                ack_id="%s" % i,
+                message={
+                    "data": f'Message {i}'.encode('utf8'),
+                    "attributes": {"type": "generated message"},
                 },
-                ReceivedMessage(),
             )
             for i in range(1, count + 1)
         ]
 
     def _generate_dicts(self, count):
-        return [MessageToDict(m) for m in self._generate_messages(count)]
+        return [ReceivedMessage.to_dict(m) for m in self._generate_messages(count)]
 
     @mock.patch('airflow.providers.google.cloud.sensors.pubsub.PubSubHook')
     def test_poke_no_messages(self, mock_hook):
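
For context, the updated tests above target the google-cloud-pubsub 2.x API, in which the
message types are proto-plus classes. A minimal sketch of the pattern, assuming that library
version, looks like this (illustrative only, not part of the patch):

    from google.cloud.pubsub_v1.types import ReceivedMessage

    # proto-plus messages are built from keyword arguments; nested fields accept plain dicts
    msg = ReceivedMessage(
        ack_id="1",
        message={"data": b"Message 1", "attributes": {"type": "generated message"}},
    )

    # the class-level to_dict() helper replaces google.protobuf.json_format.MessageToDict
    as_dict = ReceivedMessage.to_dict(msg)

The hook assertions above follow the same 2.x convention: the call arguments are wrapped in a
single request mapping, with retry, timeout, and metadata passed as separate keyword arguments.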


[airflow] 02/41: Switch to f-strings using flynt. (#13732)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 95889128afec4ebb8a3dfe8a8e13a7264d1a3bd6
Author: Joshua Carp <jm...@gmail.com>
AuthorDate: Sat Jan 23 00:19:38 2021 -0500

    Switch to f-strings using flynt. (#13732)
    
    (cherry picked from commit a9ac2b040b64de1aa5d9c2b9def33334e36a8d22)
---
 .pre-commit-config.yaml                            |  7 ++++
 BREEZE.rst                                         |  2 +-
 STATIC_CODE_CHECKS.rst                             |  2 +
 airflow/api/common/experimental/get_code.py        |  2 +-
 airflow/api/common/experimental/pool.py            |  6 +--
 .../api_connexion/endpoints/connection_endpoint.py |  2 +-
 airflow/cli/commands/dag_command.py                |  2 +-
 airflow/cli/commands/task_command.py               | 11 ++----
 airflow/cli/commands/user_command.py               |  8 ++--
 airflow/cli/commands/variable_command.py           |  2 +-
 airflow/configuration.py                           |  2 +-
 .../example_passing_params_via_test_command.py     |  6 +--
 airflow/example_dags/example_trigger_target_dag.py |  2 +-
 airflow/example_dags/subdags/subdag.py             |  2 +-
 airflow/example_dags/tutorial_taskflow_api_etl.py  |  2 +-
 airflow/hooks/dbapi.py                             |  2 +-
 airflow/kubernetes/refresh_config.py               |  2 +-
 airflow/models/connection.py                       |  6 +--
 airflow/models/dag.py                              |  2 +-
 airflow/models/taskinstance.py                     |  4 +-
 airflow/models/xcom.py                             |  4 +-
 airflow/operators/sql.py                           | 14 +++----
 airflow/providers/amazon/aws/hooks/datasync.py     |  4 +-
 airflow/providers/amazon/aws/hooks/dynamodb.py     |  4 +-
 airflow/providers/amazon/aws/hooks/sagemaker.py    |  4 +-
 .../providers/amazon/aws/log/s3_task_handler.py    |  2 +-
 airflow/providers/amazon/aws/operators/datasync.py |  2 +-
 .../amazon/aws/operators/emr_add_steps.py          |  2 +-
 .../amazon/aws/operators/emr_create_job_flow.py    |  2 +-
 .../amazon/aws/operators/emr_modify_cluster.py     |  2 +-
 .../amazon/aws/operators/emr_terminate_job_flow.py |  2 +-
 .../amazon/aws/operators/sagemaker_endpoint.py     |  2 +-
 .../aws/operators/sagemaker_endpoint_config.py     |  2 +-
 .../amazon/aws/operators/sagemaker_model.py        |  2 +-
 .../amazon/aws/operators/sagemaker_processing.py   |  2 +-
 .../amazon/aws/operators/sagemaker_training.py     |  2 +-
 .../amazon/aws/operators/sagemaker_transform.py    |  2 +-
 .../amazon/aws/operators/sagemaker_tuning.py       |  2 +-
 .../providers/amazon/aws/sensors/sagemaker_base.py |  2 +-
 airflow/providers/apache/druid/hooks/druid.py      |  4 +-
 airflow/providers/apache/hdfs/sensors/hdfs.py      |  2 +-
 airflow/providers/apache/hive/hooks/hive.py        |  8 +---
 .../providers/apache/hive/operators/hive_stats.py  | 24 +++++-------
 airflow/providers/apache/spark/hooks/spark_jdbc.py |  2 +-
 .../providers/apache/spark/hooks/spark_submit.py   |  8 ++--
 .../providers/cncf/kubernetes/hooks/kubernetes.py  |  6 +--
 .../cncf/kubernetes/operators/kubernetes_pod.py    |  3 +-
 .../cncf/kubernetes/sensors/spark_kubernetes.py    |  2 +-
 airflow/providers/databricks/hooks/databricks.py   |  2 +-
 airflow/providers/docker/operators/docker_swarm.py |  2 +-
 airflow/providers/ftp/hooks/ftp.py                 |  4 +-
 airflow/providers/google/cloud/hooks/bigquery.py   |  4 +-
 airflow/providers/google/cloud/hooks/compute.py    | 20 +++-------
 airflow/providers/google/cloud/hooks/dataflow.py   | 12 +++---
 airflow/providers/google/cloud/hooks/functions.py  |  4 +-
 .../google/cloud/hooks/kubernetes_engine.py        |  2 +-
 .../providers/google/cloud/log/gcs_task_handler.py |  4 +-
 .../providers/google/cloud/operators/bigquery.py   |  4 +-
 .../providers/google/cloud/operators/dataproc.py   |  2 +-
 .../jenkins/operators/jenkins_job_trigger.py       |  6 +--
 airflow/providers/jira/hooks/jira.py               |  4 +-
 airflow/providers/jira/operators/jira.py           |  4 +-
 .../azure/operators/azure_container_instances.py   |  2 +-
 .../providers/microsoft/winrm/operators/winrm.py   |  2 +-
 airflow/providers/mysql/hooks/mysql.py             | 21 +++-------
 airflow/providers/opsgenie/hooks/opsgenie_alert.py |  2 +-
 airflow/providers/oracle/hooks/oracle.py           |  2 +-
 airflow/providers/pagerduty/hooks/pagerduty.py     |  2 +-
 airflow/providers/plexus/operators/job.py          |  4 +-
 airflow/providers/postgres/hooks/postgres.py       |  2 +-
 airflow/providers/qubole/hooks/qubole.py           |  4 +-
 airflow/providers/salesforce/hooks/salesforce.py   |  2 +-
 airflow/providers/sftp/operators/sftp.py           |  2 +-
 airflow/providers/ssh/operators/ssh.py             |  2 +-
 airflow/security/kerberos.py                       |  4 +-
 airflow/security/utils.py                          |  2 +-
 airflow/sensors/date_time.py                       |  2 +-
 airflow/sensors/sql.py                             |  2 +-
 airflow/utils/cli.py                               |  2 +-
 airflow/utils/code_utils.py                        |  2 +-
 airflow/utils/log/file_task_handler.py             |  6 +--
 airflow/utils/timezone.py                          |  2 +-
 airflow/www/utils.py                               | 10 ++---
 airflow/www/validators.py                          |  4 +-
 airflow/www/views.py                               | 19 +++------
 breeze-complete                                    |  1 +
 docs/exts/airflow_intersphinx.py                   |  2 +-
 docs/exts/exampleinclude.py                        |  2 +-
 docs/exts/redirects.py                             |  2 +-
 metastore_browser/hive_metastore.py                | 14 +++----
 .../pre_commit_check_provider_yaml_files.py        |  8 ++--
 scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py    |  6 +--
 tests/api/common/experimental/test_pool.py         |  2 +-
 tests/cli/commands/test_connection_command.py      | 14 +++----
 tests/core/test_core.py                            |  2 +-
 tests/dags/test_subdag.py                          |  2 +-
 tests/dags_corrupted/test_impersonation_custom.py  |  2 +-
 tests/executors/test_celery_executor.py            |  2 +-
 tests/hooks/test_dbapi.py                          |  2 +-
 tests/models/test_baseoperator.py                  |  4 +-
 tests/models/test_connection.py                    |  2 +-
 tests/models/test_dag.py                           |  4 +-
 tests/models/test_dagbag.py                        |  6 +--
 tests/models/test_renderedtifields.py              |  2 +-
 .../amazon/aws/hooks/test_batch_waiters.py         |  2 +-
 tests/providers/amazon/aws/hooks/test_s3.py        |  4 +-
 .../apache/hive/operators/test_hive_stats.py       | 14 +++----
 .../apache/hive/transfers/test_mysql_to_hive.py    | 24 +++++-------
 .../providers/apache/spark/hooks/test_spark_sql.py | 10 ++---
 tests/providers/apache/sqoop/hooks/test_sqoop.py   | 45 +++++++++-------------
 .../elasticsearch/log/elasticmock/__init__.py      |  2 +-
 .../providers/google/cloud/hooks/test_cloud_sql.py |  4 +-
 tests/providers/google/cloud/hooks/test_pubsub.py  | 32 +++++++--------
 .../google/cloud/operators/test_dataflow.py        |  2 +-
 .../google/cloud/operators/test_mlengine_utils.py  |  6 +--
 tests/providers/google/cloud/sensors/test_gcs.py   |  2 +-
 .../cloud/transfers/test_bigquery_to_bigquery.py   |  2 +-
 .../google/cloud/transfers/test_gcs_to_gcs.py      |  2 +-
 .../cloud/utils/test_mlengine_operator_utils.py    |  6 +--
 tests/providers/mysql/hooks/test_mysql.py          |  6 +--
 tests/serialization/test_dag_serialization.py      |  4 +-
 tests/test_utils/gcp_system_helpers.py             |  2 +-
 tests/test_utils/logging_command_executor.py       |  2 +-
 tests/test_utils/mock_operators.py                 |  2 +-
 tests/utils/test_helpers.py                        |  4 +-
 tests/www/api/experimental/test_endpoints.py       |  2 +-
 tests/www/test_views.py                            | 16 ++++----
 127 files changed, 280 insertions(+), 361 deletions(-)
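
For context on the diff below: flynt mechanically rewrites %-interpolation and str.format()
calls into f-strings. A minimal before/after sketch (illustrative only, not taken from the
patch):

    name = "scheduler"

    # before
    print("Restarting %s" % name)
    print("Restarting {}".format(name))

    # after, as flynt would emit it
    print(f"Restarting {name}")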

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aea91e3..0eb96fd 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -609,4 +609,11 @@ repos:
         entry: "./scripts/ci/pre_commit/pre_commit_in_container_bats_test.sh"
         files: ^tests/bats/in_container/.*.bats$|^scripts/in_container/.*sh
         pass_filenames: false
+      - id: flynt
+        name: Convert to f-strings with flynt
+        entry: flynt
+        language: python
+        language_version: python3
+        additional_dependencies: ['flynt']
+        files: \.py$
         ## ONLY ADD PRE-COMMITS HERE THAT REQUIRE CI IMAGE
diff --git a/BREEZE.rst b/BREEZE.rst
index 131f94c..f4689ba 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2232,7 +2232,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  check-executables-have-shebangs check-hooks-apply check-integrations
                  check-merge-conflict check-xml consistent-pylint daysago-import-check
                  debug-statements detect-private-key doctoc dont-use-safe-filter end-of-file-fixer
-                 fix-encoding-pragma flake8 forbid-tabs helm-lint identity
+                 fix-encoding-pragma flake8 flynt forbid-tabs helm-lint identity
                  incorrect-use-of-LoggingMixin insert-license isort json-schema language-matters
                  lint-dockerfile lint-openapi markdownlint mermaid mixed-line-ending mypy mypy-helm
                  no-providers-in-core-examples no-relative-imports pre-commit-descriptions
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 55674bf..1c28dd9 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -102,6 +102,8 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``flake8``                            Runs flake8.                                                         *
 ----------------------------------- ---------------------------------------------------------------- ------------
+``flynt``                             Runs flynt.
+----------------------------------- ---------------------------------------------------------------- ------------
 ``forbid-tabs``                       Fails if tabs are used in the project.
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``helm-lint``                         Verifies if helm lint passes for the chart
diff --git a/airflow/api/common/experimental/get_code.py b/airflow/api/common/experimental/get_code.py
index 99f248b..79b0b9f 100644
--- a/airflow/api/common/experimental/get_code.py
+++ b/airflow/api/common/experimental/get_code.py
@@ -32,5 +32,5 @@ def get_code(dag_id: str) -> str:
     try:
         return DagCode.get_code_by_fileloc(dag.fileloc)
     except (OSError, DagCodeNotFound) as exception:
-        error_message = "Error {} while reading Dag id {} Code".format(str(exception), dag_id)
+        error_message = f"Error {str(exception)} while reading Dag id {dag_id} Code"
         raise AirflowException(error_message, exception)
diff --git a/airflow/api/common/experimental/pool.py b/airflow/api/common/experimental/pool.py
index 0f1d1c7..30950ea 100644
--- a/airflow/api/common/experimental/pool.py
+++ b/airflow/api/common/experimental/pool.py
@@ -29,7 +29,7 @@ def get_pool(name, session=None):
 
     pool = session.query(Pool).filter_by(pool=name).first()
     if pool is None:
-        raise PoolNotFound("Pool '%s' doesn't exist" % name)
+        raise PoolNotFound(f"Pool '{name}' doesn't exist")
 
     return pool
 
@@ -49,7 +49,7 @@ def create_pool(name, slots, description, session=None):
     try:
         slots = int(slots)
     except ValueError:
-        raise AirflowBadRequest("Bad value for `slots`: %s" % slots)
+        raise AirflowBadRequest(f"Bad value for `slots`: {slots}")
 
     # Get the length of the pool column
     pool_name_length = Pool.pool.property.columns[0].type.length
@@ -81,7 +81,7 @@ def delete_pool(name, session=None):
 
     pool = session.query(Pool).filter_by(pool=name).first()
     if pool is None:
-        raise PoolNotFound("Pool '%s' doesn't exist" % name)
+        raise PoolNotFound(f"Pool '{name}' doesn't exist")
 
     session.delete(pool)
     session.commit()
diff --git a/airflow/api_connexion/endpoints/connection_endpoint.py b/airflow/api_connexion/endpoints/connection_endpoint.py
index ecee686..df3bd41 100644
--- a/airflow/api_connexion/endpoints/connection_endpoint.py
+++ b/airflow/api_connexion/endpoints/connection_endpoint.py
@@ -124,4 +124,4 @@ def post_connection(session):
         session.add(connection)
         session.commit()
         return connection_schema.dump(connection)
-    raise AlreadyExists(detail="Connection already exist. ID: %s" % conn_id)
+    raise AlreadyExists(detail=f"Connection already exist. ID: {conn_id}")
diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py
index 40f8834..9b050ec 100644
--- a/airflow/cli/commands/dag_command.py
+++ b/airflow/cli/commands/dag_command.py
@@ -169,7 +169,7 @@ def set_is_paused(is_paused, args):
 
     dag.set_is_paused(is_paused=is_paused)
 
-    print("Dag: {}, paused: {}".format(args.dag_id, str(is_paused)))
+    print(f"Dag: {args.dag_id}, paused: {is_paused}")
 
 
 def dag_show(args):
diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py
index c2794d4..fac4c26 100644
--- a/airflow/cli/commands/task_command.py
+++ b/airflow/cli/commands/task_command.py
@@ -406,14 +406,11 @@ def task_render(args):
     for attr in task.__class__.template_fields:
         print(
             textwrap.dedent(
-                """\
+                f"""        # ----------------------------------------------------------
+        # property: {attr}
         # ----------------------------------------------------------
-        # property: {}
-        # ----------------------------------------------------------
-        {}
-        """.format(
-                    attr, getattr(task, attr)
-                )
+        {getattr(task, attr)}
+        """
             )
         )
 
diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py
index 3fd80bd..1274745 100644
--- a/airflow/cli/commands/user_command.py
+++ b/airflow/cli/commands/user_command.py
@@ -98,7 +98,7 @@ def users_manage_role(args, remove=False):
     appbuilder = cached_app().appbuilder  # pylint: disable=no-member
     user = appbuilder.sm.find_user(username=args.username) or appbuilder.sm.find_user(email=args.email)
     if not user:
-        raise SystemExit('User "{}" does not exist'.format(args.username or args.email))
+        raise SystemExit(f'User "{args.username or args.email}" does not exist')
 
     role = appbuilder.sm.find_role(args.role)
     if not role:
@@ -144,7 +144,7 @@ def users_export(args):
 
     with open(args.export, 'w') as file:
         file.write(json.dumps(users, sort_keys=True, indent=4))
-        print("{} users successfully exported to {}".format(len(users), file.name))
+        print(f"{len(users)} users successfully exported to {file.name}")
 
 
 @cli_utils.action_logging
@@ -191,7 +191,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
 
         existing_user = appbuilder.sm.find_user(email=user['email'])
         if existing_user:
-            print("Found existing user with email '{}'".format(user['email']))
+            print(f"Found existing user with email '{user['email']}'")
             existing_user.roles = roles
             existing_user.first_name = user['firstname']
             existing_user.last_name = user['lastname']
@@ -206,7 +206,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             appbuilder.sm.update_user(existing_user)
             users_updated.append(user['email'])
         else:
-            print("Creating new user with email '{}'".format(user['email']))
+            print(f"Creating new user with email '{user['email']}'")
             appbuilder.sm.add_user(
                 username=user['username'],
                 first_name=user['firstname'],
diff --git a/airflow/cli/commands/variable_command.py b/airflow/cli/commands/variable_command.py
index 55cf94b..526f094 100644
--- a/airflow/cli/commands/variable_command.py
+++ b/airflow/cli/commands/variable_command.py
@@ -92,7 +92,7 @@ def _import_helper(filepath):
             try:
                 Variable.set(k, v, serialize_json=not isinstance(v, str))
             except Exception as e:  # pylint: disable=broad-except
-                print('Variable import failed: {}'.format(repr(e)))
+                print(f'Variable import failed: {repr(e)}')
                 fail_count += 1
             else:
                 suc_count += 1
diff --git a/airflow/configuration.py b/airflow/configuration.py
index ecd2bc6..5b765de 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -536,7 +536,7 @@ class AirflowConfigParser(ConfigParser):  # pylint: disable=too-many-ancestors
         # This is based on the configparser.RawConfigParser.write method code to add support for
         # reading options from environment variables.
         if space_around_delimiters:
-            delimiter = " {} ".format(self._delimiters[0])
+            delimiter = f" {self._delimiters[0]} "
         else:
             delimiter = self._delimiters[0]
         if self._defaults:
diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py
index 8eaadd7..456def2 100644
--- a/airflow/example_dags/example_passing_params_via_test_command.py
+++ b/airflow/example_dags/example_passing_params_via_test_command.py
@@ -52,7 +52,7 @@ def my_py_command(test_mode, params):
             )
         )
     # Print out the value of "miff", passed in below via the Python Operator
-    print(" 'miff' was passed in via task params = {}".format(params["miff"]))
+    print(f" 'miff' was passed in via task params = {params['miff']}")
     return 1
 
 
@@ -83,8 +83,8 @@ def print_env_vars(test_mode):
     --env-vars '{"foo":"bar"}'`
     """
     if test_mode:
-        print("foo={}".format(os.environ.get('foo')))
-        print("AIRFLOW_TEST_MODE={}".format(os.environ.get('AIRFLOW_TEST_MODE')))
+        print(f"foo={os.environ.get('foo')}")
+        print(f"AIRFLOW_TEST_MODE={os.environ.get('AIRFLOW_TEST_MODE')}")
 
 
 env_var_test_task = PythonOperator(task_id='env_var_test_task', python_callable=print_env_vars, dag=dag)
diff --git a/airflow/example_dags/example_trigger_target_dag.py b/airflow/example_dags/example_trigger_target_dag.py
index 0355275..f431dc4 100644
--- a/airflow/example_dags/example_trigger_target_dag.py
+++ b/airflow/example_dags/example_trigger_target_dag.py
@@ -43,7 +43,7 @@ def run_this_func(**context):
     :param context: The execution context
     :type context: dict
     """
-    print("Remotely received value of {} for key=message".format(context["dag_run"].conf["message"]))
+    print(f"Remotely received value of {context['dag_run'].conf['message']} for key=message")
 
 
 run_this = PythonOperator(task_id="run_this", python_callable=run_this_func, dag=dag)
diff --git a/airflow/example_dags/subdags/subdag.py b/airflow/example_dags/subdags/subdag.py
index 6a30415..849b294 100644
--- a/airflow/example_dags/subdags/subdag.py
+++ b/airflow/example_dags/subdags/subdag.py
@@ -43,7 +43,7 @@ def subdag(parent_dag_name, child_dag_name, args):
 
     for i in range(5):
         DummyOperator(
-            task_id='{}-task-{}'.format(child_dag_name, i + 1),
+            task_id=f'{child_dag_name}-task-{i + 1}',
             default_args=args,
             dag=dag_subdag,
         )
diff --git a/airflow/example_dags/tutorial_taskflow_api_etl.py b/airflow/example_dags/tutorial_taskflow_api_etl.py
index e50ae5f..cfcfbd9 100644
--- a/airflow/example_dags/tutorial_taskflow_api_etl.py
+++ b/airflow/example_dags/tutorial_taskflow_api_etl.py
@@ -91,7 +91,7 @@ def tutorial_taskflow_api_etl():
         instead of saving it to end user review, just prints it out.
         """
 
-        print("Total order value is: %.2f" % total_order_value)
+        print(f"Total order value is: {total_order_value:.2f}")
 
     # [END load]
 
diff --git a/airflow/hooks/dbapi.py b/airflow/hooks/dbapi.py
index 9e340e4..9821643 100644
--- a/airflow/hooks/dbapi.py
+++ b/airflow/hooks/dbapi.py
@@ -248,7 +248,7 @@ class DbApiHook(BaseHook):
             sql = "INSERT INTO "
         else:
             sql = "REPLACE INTO "
-        sql += "{} {} VALUES ({})".format(table, target_fields, ",".join(placeholders))
+        sql += f"{table} {target_fields} VALUES ({','.join(placeholders)})"
         return sql
 
     def insert_rows(self, table, rows, target_fields=None, commit_every=1000, replace=False, **kwargs):
diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py
index 0004cac..023cd32 100644
--- a/airflow/kubernetes/refresh_config.py
+++ b/airflow/kubernetes/refresh_config.py
@@ -61,7 +61,7 @@ class RefreshKubeConfigLoader(KubeConfigLoader):
             if 'token' not in status:
                 logging.error('exec: missing token field in plugin output')
                 return None
-            self.token = "Bearer %s" % status['token']  # pylint: disable=W0201
+            self.token = f"Bearer {status['token']}"  # pylint: disable=W0201
             ts_str = status.get('expirationTimestamp')
             if ts_str:
                 self.api_key_expire_ts = _parse_timestamp(ts_str)
diff --git a/airflow/models/connection.py b/airflow/models/connection.py
index 1159a44..4edd6b7 100644
--- a/airflow/models/connection.py
+++ b/airflow/models/connection.py
@@ -165,7 +165,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
 
     def get_uri(self) -> str:
         """Return connection in URI format"""
-        uri = '{}://'.format(str(self.conn_type).lower().replace('_', '-'))
+        uri = f"{str(self.conn_type).lower().replace('_', '-')}://"
 
         authority_block = ''
         if self.login is not None:
@@ -190,12 +190,12 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
                 host_block += f'@:{self.port}'
 
         if self.schema:
-            host_block += '/{}'.format(quote(self.schema, safe=''))
+            host_block += f"/{quote(self.schema, safe='')}"
 
         uri += host_block
 
         if self.extra_dejson:
-            uri += '?{}'.format(urlencode(self.extra_dejson))
+            uri += f'?{urlencode(self.extra_dejson)}'
 
         return uri
 
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 8bb32db..15332f3 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -1538,7 +1538,7 @@ class DAG(LoggingMixin):
             dttm = timezone.utcnow()
             pickled = pickle.dumps(self)
             d['pickle_len'] = len(pickled)
-            d['pickling_duration'] = "{}".format(timezone.utcnow() - dttm)
+            d['pickling_duration'] = str(timezone.utcnow() - dttm)
         except Exception as e:
             self.log.debug(e)
             d['is_picklable'] = False
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index d671a01..3c9f53a 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -1595,9 +1595,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         yesterday_ds_nodash = yesterday_ds.replace('-', '')
         tomorrow_ds_nodash = tomorrow_ds.replace('-', '')
 
-        ti_key_str = "{dag_id}__{task_id}__{ds_nodash}".format(
-            dag_id=task.dag_id, task_id=task.task_id, ds_nodash=ds_nodash
-        )
+        ti_key_str = f"{task.dag_id}__{task.task_id}__{ds_nodash}"
 
         if task.params:
             params.update(task.params)
diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py
index 6428a40..844e38f 100644
--- a/airflow/models/xcom.py
+++ b/airflow/models/xcom.py
@@ -71,9 +71,7 @@ class BaseXCom(Base, LoggingMixin):
             self.value = pickle.loads(self.value)
 
     def __repr__(self):
-        return '<XCom "{key}" ({task_id} @ {execution_date})>'.format(
-            key=self.key, task_id=self.task_id, execution_date=self.execution_date
-        )
+        return f'<XCom "{self.key}" ({self.task_id} @ {self.execution_date})>'
 
     @classmethod
     @provide_session
diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py
index 59c2e60..00f7e13 100644
--- a/airflow/operators/sql.py
+++ b/airflow/operators/sql.py
@@ -293,9 +293,7 @@ class SQLIntervalCheckOperator(BaseOperator):
         self.days_back = -abs(days_back)
         self.conn_id = conn_id
         sqlexp = ", ".join(self.metrics_sorted)
-        sqlt = "SELECT {sqlexp} FROM {table} WHERE {date_filter_column}=".format(
-            sqlexp=sqlexp, table=table, date_filter_column=date_filter_column
-        )
+        sqlt = f"SELECT {sqlexp} FROM {table} WHERE {date_filter_column}="
 
         self.sql1 = sqlt + "'{{ ds }}'"
         self.sql2 = sqlt + "'{{ macros.ds_add(ds, " + str(self.days_back) + ") }}'"
@@ -360,9 +358,7 @@ class SQLIntervalCheckOperator(BaseOperator):
                     ratios[k],
                     self.metrics_thresholds[k],
                 )
-            raise AirflowException(
-                "The following tests have failed:\n {}".format(", ".join(sorted(failed_tests)))
-            )
+            raise AirflowException(f"The following tests have failed:\n {', '.join(sorted(failed_tests))}")
 
         self.log.info("All tests have passed")
 
@@ -535,7 +531,7 @@ class BranchSQLOperator(BaseOperator, SkipMixin):
         self._hook = self._get_hook()
 
         if self._hook is None:
-            raise AirflowException("Failed to establish connection to '%s'" % self.conn_id)
+            raise AirflowException(f"Failed to establish connection to '{self.conn_id}'")
 
         if self.sql is None:
             raise AirflowException("Expected 'sql' parameter is missing.")
@@ -584,14 +580,14 @@ class BranchSQLOperator(BaseOperator, SkipMixin):
                     follow_branch = self.follow_task_ids_if_true
             else:
                 raise AirflowException(
-                    "Unexpected query return result '{}' type '{}'".format(query_result, type(query_result))
+                    f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
                 )
 
             if follow_branch is None:
                 follow_branch = self.follow_task_ids_if_false
         except ValueError:
             raise AirflowException(
-                "Unexpected query return result '{}' type '{}'".format(query_result, type(query_result))
+                f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
             )
 
         self.skip_all_except(context["ti"], follow_branch)
diff --git a/airflow/providers/amazon/aws/hooks/datasync.py b/airflow/providers/amazon/aws/hooks/datasync.py
index 4925529..3c82749 100644
--- a/airflow/providers/amazon/aws/hooks/datasync.py
+++ b/airflow/providers/amazon/aws/hooks/datasync.py
@@ -57,7 +57,7 @@ class AWSDataSyncHook(AwsBaseHook):
         self.tasks: list = []
         # wait_interval_seconds = 0 is used during unit tests
         if wait_interval_seconds < 0 or wait_interval_seconds > 15 * 60:
-            raise ValueError("Invalid wait_interval_seconds %s" % wait_interval_seconds)
+            raise ValueError(f"Invalid wait_interval_seconds {wait_interval_seconds}")
         self.wait_interval_seconds = wait_interval_seconds
 
     def create_location(self, location_uri: str, **create_location_kwargs) -> str:
@@ -314,4 +314,4 @@ class AWSDataSyncHook(AwsBaseHook):
             return False
         if iterations <= 0:
             raise AirflowTaskTimeout("Max iterations exceeded!")
-        raise AirflowException("Unknown status: %s" % status)  # Should never happen
+        raise AirflowException(f"Unknown status: {status}")  # Should never happen
diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/airflow/providers/amazon/aws/hooks/dynamodb.py
index a829f8d..a66f2b0 100644
--- a/airflow/providers/amazon/aws/hooks/dynamodb.py
+++ b/airflow/providers/amazon/aws/hooks/dynamodb.py
@@ -58,6 +58,4 @@ class AwsDynamoDBHook(AwsBaseHook):
                     batch.put_item(Item=item)
             return True
         except Exception as general_error:
-            raise AirflowException(
-                "Failed to insert items in dynamodb, error: {error}".format(error=str(general_error))
-            )
+            raise AirflowException(f"Failed to insert items in dynamodb, error: {str(general_error)}")
diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py
index ab5fdd1..d6548ad 100644
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -126,7 +126,7 @@ def secondary_training_status_message(
     for transition in transitions_to_print:
         message = transition['StatusMessage']
         time_str = timezone.convert_to_utc(job_description['LastModifiedTime']).strftime('%Y-%m-%d %H:%M:%S')
-        status_strs.append('{} {} - {}'.format(time_str, transition['Status'], message))
+        status_strs.append(f"{time_str} {transition['Status']} - {message}")
 
     return '\n'.join(status_strs)
 
@@ -740,7 +740,7 @@ class SageMakerHook(AwsBaseHook):  # pylint: disable=too-many-public-methods
             if status in non_terminal_states:
                 running = True
             elif status in self.failed_states:
-                raise AirflowException('SageMaker job failed because %s' % response['FailureReason'])
+                raise AirflowException(f"SageMaker job failed because {response['FailureReason']}")
             else:
                 running = False
 
diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py
index d6e5326..7fdeac3 100644
--- a/airflow/providers/amazon/aws/log/s3_task_handler.py
+++ b/airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -118,7 +118,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
             log_exists = self.s3_log_exists(remote_loc)
         except Exception as error:  # pylint: disable=broad-except
             self.log.exception(error)
-            log = '*** Failed to verify remote log exists {}.\n{}\n'.format(remote_loc, str(error))
+            log = f'*** Failed to verify remote log exists {remote_loc}.\n{str(error)}\n'
 
         if log_exists:
             # If S3 remote file exists, we do not fetch logs from task instance
diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py
index fab6898..f5c129a 100644
--- a/airflow/providers/amazon/aws/operators/datasync.py
+++ b/airflow/providers/amazon/aws/operators/datasync.py
@@ -351,7 +351,7 @@ class AWSDataSyncOperator(BaseOperator):
                     self.log.log(level, '%s=%s', k, v)
 
         if not result:
-            raise AirflowException("Failed TaskExecutionArn %s" % self.task_execution_arn)
+            raise AirflowException(f"Failed TaskExecutionArn {self.task_execution_arn}")
 
     def on_kill(self) -> None:
         """Cancel the submitted DataSync task."""
diff --git a/airflow/providers/amazon/aws/operators/emr_add_steps.py b/airflow/providers/amazon/aws/operators/emr_add_steps.py
index 44bc20c..2ffd5cc 100644
--- a/airflow/providers/amazon/aws/operators/emr_add_steps.py
+++ b/airflow/providers/amazon/aws/operators/emr_add_steps.py
@@ -100,7 +100,7 @@ class EmrAddStepsOperator(BaseOperator):
         response = emr.add_job_flow_steps(JobFlowId=job_flow_id, Steps=steps)
 
         if not response['ResponseMetadata']['HTTPStatusCode'] == 200:
-            raise AirflowException('Adding steps failed: %s' % response)
+            raise AirflowException(f'Adding steps failed: {response}')
         else:
             self.log.info('Steps %s added to JobFlow', response['StepIds'])
             return response['StepIds']
diff --git a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
index b3b6808..d8dc31e 100644
--- a/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
+++ b/airflow/providers/amazon/aws/operators/emr_create_job_flow.py
@@ -78,7 +78,7 @@ class EmrCreateJobFlowOperator(BaseOperator):
         response = emr.create_job_flow(job_flow_overrides)
 
         if not response['ResponseMetadata']['HTTPStatusCode'] == 200:
-            raise AirflowException('JobFlow creation failed: %s' % response)
+            raise AirflowException(f'JobFlow creation failed: {response}')
         else:
             self.log.info('JobFlow with id %s created', response['JobFlowId'])
             return response['JobFlowId']
diff --git a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
index f0e4693..a04e845 100644
--- a/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
+++ b/airflow/providers/amazon/aws/operators/emr_modify_cluster.py
@@ -66,7 +66,7 @@ class EmrModifyClusterOperator(BaseOperator):
         )
 
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Modify cluster failed: %s' % response)
+            raise AirflowException(f'Modify cluster failed: {response}')
         else:
             self.log.info('Steps concurrency level %d', response['StepConcurrencyLevel'])
             return response['StepConcurrencyLevel']
diff --git a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
index 0e7e17f..9d75eaf 100644
--- a/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
+++ b/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py
@@ -51,6 +51,6 @@ class EmrTerminateJobFlowOperator(BaseOperator):
         response = emr.terminate_job_flows(JobFlowIds=[self.job_flow_id])
 
         if not response['ResponseMetadata']['HTTPStatusCode'] == 200:
-            raise AirflowException('JobFlow termination failed: %s' % response)
+            raise AirflowException(f'JobFlow termination failed: {response}')
         else:
             self.log.info('JobFlow with id %s terminated', self.job_flow_id)
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
index 53cfd93..35b0b11 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py
@@ -150,7 +150,7 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
             )
 
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker endpoint creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker endpoint creation failed: {response}')
         else:
             return {
                 'EndpointConfig': self.hook.describe_endpoint_config(endpoint_info['EndpointConfigName']),
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
index bbf2be1..a2add7b 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py
@@ -49,6 +49,6 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
         self.log.info('Creating SageMaker Endpoint Config %s.', self.config['EndpointConfigName'])
         response = self.hook.create_endpoint_config(self.config)
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker endpoint config creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker endpoint config creation failed: {response}')
         else:
             return {'EndpointConfig': self.hook.describe_endpoint_config(self.config['EndpointConfigName'])}
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_model.py b/airflow/providers/amazon/aws/operators/sagemaker_model.py
index 25730ea..0e8cbf4 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_model.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_model.py
@@ -53,6 +53,6 @@ class SageMakerModelOperator(SageMakerBaseOperator):
         self.log.info('Creating SageMaker Model %s.', self.config['ModelName'])
         response = self.hook.create_model(self.config)
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker model creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker model creation failed: {response}')
         else:
             return {'Model': self.hook.describe_model(self.config['ModelName'])}
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_processing.py b/airflow/providers/amazon/aws/operators/sagemaker_processing.py
index e56a987..271b46b 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_processing.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_processing.py
@@ -119,5 +119,5 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
             max_ingestion_time=self.max_ingestion_time,
         )
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker Processing Job creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker Processing Job creation failed: {response}')
         return {'Processing': self.hook.describe_processing_job(self.config['ProcessingJobName'])}
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_training.py b/airflow/providers/amazon/aws/operators/sagemaker_training.py
index 29c34f6..7d9eaf2 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_training.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_training.py
@@ -117,6 +117,6 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
             max_ingestion_time=self.max_ingestion_time,
         )
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker Training Job creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker Training Job creation failed: {response}')
         else:
             return {'Training': self.hook.describe_training_job(self.config['TrainingJobName'])}
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_transform.py b/airflow/providers/amazon/aws/operators/sagemaker_transform.py
index 7caf9f1..b264d2d 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_transform.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_transform.py
@@ -116,7 +116,7 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
             max_ingestion_time=self.max_ingestion_time,
         )
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker transform Job creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker transform Job creation failed: {response}')
         else:
             return {
                 'Model': self.hook.describe_model(transform_config['ModelName']),
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
index f8df36a..38664a8 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_tuning.py
@@ -92,6 +92,6 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
             max_ingestion_time=self.max_ingestion_time,
         )
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
-            raise AirflowException('Sagemaker Tuning Job creation failed: %s' % response)
+            raise AirflowException(f'Sagemaker Tuning Job creation failed: {response}')
         else:
             return {'Tuning': self.hook.describe_tuning_job(self.config['HyperParameterTuningJobName'])}
diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_base.py b/airflow/providers/amazon/aws/sensors/sagemaker_base.py
index 16c8cd7..6572122 100644
--- a/airflow/providers/amazon/aws/sensors/sagemaker_base.py
+++ b/airflow/providers/amazon/aws/sensors/sagemaker_base.py
@@ -63,7 +63,7 @@ class SageMakerBaseSensor(BaseSensorOperator):
 
         if state in self.failed_states():
             failed_reason = self.get_failed_reason_from_response(response)
-            raise AirflowException('Sagemaker job failed for the following reason: %s' % failed_reason)
+            raise AirflowException(f'Sagemaker job failed for the following reason: {failed_reason}')
         return True
 
     def non_terminal_states(self) -> Set[str]:
diff --git a/airflow/providers/apache/druid/hooks/druid.py b/airflow/providers/apache/druid/hooks/druid.py
index b4af207..69d33a1 100644
--- a/airflow/providers/apache/druid/hooks/druid.py
+++ b/airflow/providers/apache/druid/hooks/druid.py
@@ -68,9 +68,7 @@ class DruidHook(BaseHook):
         port = conn.port
         conn_type = 'http' if not conn.conn_type else conn.conn_type
         endpoint = conn.extra_dejson.get('endpoint', '')
-        return "{conn_type}://{host}:{port}/{endpoint}".format(
-            conn_type=conn_type, host=host, port=port, endpoint=endpoint
-        )
+        return f"{conn_type}://{host}:{port}/{endpoint}"
 
     def get_auth(self) -> Optional[requests.auth.HTTPBasicAuth]:
         """
diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/airflow/providers/apache/hdfs/sensors/hdfs.py
index 65cfb5b..867c193 100644
--- a/airflow/providers/apache/hdfs/sensors/hdfs.py
+++ b/airflow/providers/apache/hdfs/sensors/hdfs.py
@@ -141,7 +141,7 @@ class HdfsRegexSensor(HdfsSensor):
         result = [
             f
             for f in sb_client.ls([self.filepath], include_toplevel=False)
-            if f['file_type'] == 'f' and self.regex.match(f['path'].replace('%s/' % self.filepath, ''))
+            if f['file_type'] == 'f' and self.regex.match(f['path'].replace(f'{self.filepath}/', ''))
         ]
         result = self.filter_for_ignored_ext(result, self.ignored_ext, self.ignore_copying)
         result = self.filter_for_filesize(result, self.file_size)
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index ab7b7b7..41f5cff 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -133,9 +133,7 @@ class HiveCliHook(BaseHook):
 
         if self.use_beeline:
             hive_bin = 'beeline'
-            jdbc_url = "jdbc:hive2://{host}:{port}/{schema}".format(
-                host=conn.host, port=conn.port, schema=conn.schema
-            )
+            jdbc_url = f"jdbc:hive2://{conn.host}:{conn.port}/{conn.schema}"
             if conf.get('core', 'security') == 'kerberos':
                 template = conn.extra_dejson.get('principal', "hive/_HOST@EXAMPLE.COM")
                 if "_HOST" in template:
@@ -143,9 +141,7 @@ class HiveCliHook(BaseHook):
 
                 proxy_user = self._get_proxy_user()
 
-                jdbc_url += ";principal={template};{proxy_user}".format(
-                    template=template, proxy_user=proxy_user
-                )
+                jdbc_url += f";principal={template};{proxy_user}"
             elif self.auth:
                 jdbc_url += ";auth=" + self.auth
 
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py
index d4de591..88faa40 100644
--- a/airflow/providers/apache/hive/operators/hive_stats.py
+++ b/airflow/providers/apache/hive/operators/hive_stats.py
@@ -135,9 +135,7 @@ class HiveStatsCollectionOperator(BaseOperator):
 
         where_clause_ = [f"{k} = '{v}'" for k, v in self.partition.items()]
         where_clause = " AND\n        ".join(where_clause_)
-        sql = "SELECT {exprs_str} FROM {table} WHERE {where_clause};".format(
-            exprs_str=exprs_str, table=self.table, where_clause=where_clause
-        )
+        sql = f"SELECT {exprs_str} FROM {self.table} WHERE {where_clause};"
 
         presto = PrestoHook(presto_conn_id=self.presto_conn_id)
         self.log.info('Executing SQL check: %s', sql)
@@ -150,26 +148,22 @@ class HiveStatsCollectionOperator(BaseOperator):
 
         self.log.info("Deleting rows from previous runs if they exist")
         mysql = MySqlHook(self.mysql_conn_id)
-        sql = """
+        sql = f"""
         SELECT 1 FROM hive_stats
         WHERE
-            table_name='{table}' AND
+            table_name='{self.table}' AND
             partition_repr='{part_json}' AND
-            dttm='{dttm}'
+            dttm='{self.dttm}'
         LIMIT 1;
-        """.format(
-            table=self.table, part_json=part_json, dttm=self.dttm
-        )
+        """
         if mysql.get_records(sql):
-            sql = """
+            sql = f"""
             DELETE FROM hive_stats
             WHERE
-                table_name='{table}' AND
+                table_name='{self.table}' AND
                 partition_repr='{part_json}' AND
-                dttm='{dttm}';
-            """.format(
-                table=self.table, part_json=part_json, dttm=self.dttm
-            )
+                dttm='{self.dttm}';
+            """
             mysql.run(sql)
 
         self.log.info("Pivoting and loading cells into the Airflow db")
diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/airflow/providers/apache/spark/hooks/spark_jdbc.py
index 7a22c66..f90cea2 100644
--- a/airflow/providers/apache/spark/hooks/spark_jdbc.py
+++ b/airflow/providers/apache/spark/hooks/spark_jdbc.py
@@ -207,7 +207,7 @@ class SparkJDBCHook(SparkSubmitHook):
         if self._jdbc_connection['url']:
             arguments += [
                 '-url',
-                "{}{}/{}".format(jdbc_conn['conn_prefix'], jdbc_conn['url'], jdbc_conn['schema']),
+                f"{jdbc_conn['conn_prefix']}{jdbc_conn['url']}/{jdbc_conn['schema']}",
             ]
         if self._jdbc_connection['user']:
             arguments += ['-user', self._jdbc_connection['user']]
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py
index ac1a83a..e7f2186 100644
--- a/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -291,7 +291,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
         connection_cmd += ["--master", self._connection['master']]
 
         for key in self._conf:
-            connection_cmd += ["--conf", "{}={}".format(key, str(self._conf[key]))]
+            connection_cmd += ["--conf", f"{key}={str(self._conf[key])}"]
         if self._env_vars and (self._is_kubernetes or self._is_yarn):
             if self._is_yarn:
                 tmpl = "spark.yarn.appMasterEnv.{}={}"
@@ -308,7 +308,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
         if self._is_kubernetes and self._connection['namespace']:
             connection_cmd += [
                 "--conf",
-                "spark.kubernetes.namespace={}".format(self._connection['namespace']),
+                f"spark.kubernetes.namespace={self._connection['namespace']}",
             ]
         if self._files:
             connection_cmd += ["--files", self._files]
@@ -378,9 +378,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
                 "/usr/bin/curl",
                 "--max-time",
                 str(curl_max_wait_time),
-                "{host}/v1/submissions/status/{submission_id}".format(
-                    host=spark_host, submission_id=self._driver_id
-                ),
+                f"{spark_host}/v1/submissions/status/{self._driver_id}",
             ]
             self.log.info(connection_cmd)
 
diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
index cf27713..ca82918 100644
--- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -29,7 +29,7 @@ def _load_body_to_dict(body):
     try:
         body_dict = yaml.safe_load(body)
     except yaml.YAMLError as e:
-        raise AirflowException("Exception when loading resource definition: %s\n" % e)
+        raise AirflowException(f"Exception when loading resource definition: {e}\n")
     return body_dict
 
 
@@ -169,7 +169,7 @@ class KubernetesHook(BaseHook):
             self.log.debug("Response: %s", response)
             return response
         except client.rest.ApiException as e:
-            raise AirflowException("Exception when calling -> create_custom_object: %s\n" % e)
+            raise AirflowException(f"Exception when calling -> create_custom_object: {e}\n")
 
     def get_custom_object(
         self, group: str, version: str, plural: str, name: str, namespace: Optional[str] = None
@@ -197,7 +197,7 @@ class KubernetesHook(BaseHook):
             )
             return response
         except client.rest.ApiException as e:
-            raise AirflowException("Exception when calling -> get_custom_object: %s\n" % e)
+            raise AirflowException(f"Exception when calling -> get_custom_object: {e}\n")
 
     def get_namespace(self) -> str:
         """Returns the namespace that defined in the connection"""
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index 3f42ab1..7b4022e 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -324,8 +324,7 @@ class KubernetesPodOperator(BaseOperator):  # pylint: disable=too-many-instance-
 
             if len(pod_list.items) > 1 and self.reattach_on_restart:
                 raise AirflowException(
-                    'More than one pod running with labels: '
-                    '{label_selector}'.format(label_selector=label_selector)
+                    f'More than one pod running with labels: {label_selector}'
                 )
 
             launcher = pod_launcher.PodLauncher(kube_client=client, extract_xcom=self.do_xcom_push)
diff --git a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
index 2fa3401..eb555f1 100644
--- a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py
@@ -106,7 +106,7 @@ class SparkKubernetesSensor(BaseSensorOperator):
         if self.attach_log and application_state in self.FAILURE_STATES + self.SUCCESS_STATES:
             self._log_driver(application_state, response)
         if application_state in self.FAILURE_STATES:
-            raise AirflowException("Spark application failed with state: %s" % application_state)
+            raise AirflowException(f"Spark application failed with state: {application_state}")
         elif application_state in self.SUCCESS_STATES:
             self.log.info("Spark application ended successfully")
             return True
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index cc0fbc6..28953b9 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -178,7 +178,7 @@ class DatabricksHook(BaseHook):  # noqa
             auth = (self.databricks_conn.login, self.databricks_conn.password)
             host = self.databricks_conn.host
 
-        url = 'https://{host}/{endpoint}'.format(host=self._parse_host(host), endpoint=endpoint)
+        url = f'https://{self._parse_host(host)}/{endpoint}'
 
         if method == 'GET':
             request_func = requests.get
diff --git a/airflow/providers/docker/operators/docker_swarm.py b/airflow/providers/docker/operators/docker_swarm.py
index 18ecf9c..1098d98 100644
--- a/airflow/providers/docker/operators/docker_swarm.py
+++ b/airflow/providers/docker/operators/docker_swarm.py
@@ -126,7 +126,7 @@ class DockerSwarmOperator(DockerOperator):
                 restart_policy=types.RestartPolicy(condition='none'),
                 resources=types.Resources(mem_limit=self.mem_limit),
             ),
-            name='airflow-%s' % get_random_string(),
+            name=f'airflow-{get_random_string()}',
             labels={'name': f'airflow__{self.dag_id}__{self.task_id}'},
         )
 
diff --git a/airflow/providers/ftp/hooks/ftp.py b/airflow/providers/ftp/hooks/ftp.py
index 66a9080..6c3c064 100644
--- a/airflow/providers/ftp/hooks/ftp.py
+++ b/airflow/providers/ftp/hooks/ftp.py
@@ -180,7 +180,7 @@ class FTPHook(BaseHook):
         remote_path, remote_file_name = os.path.split(remote_full_path)
         conn.cwd(remote_path)
         self.log.info('Retrieving file from FTP: %s', remote_full_path)
-        conn.retrbinary('RETR %s' % remote_file_name, callback)
+        conn.retrbinary(f'RETR {remote_file_name}', callback)
         self.log.info('Finished retrieving file from FTP: %s', remote_full_path)
 
         if is_path and output_handle:
@@ -210,7 +210,7 @@ class FTPHook(BaseHook):
             input_handle = local_full_path_or_buffer
         remote_path, remote_file_name = os.path.split(remote_full_path)
         conn.cwd(remote_path)
-        conn.storbinary('STOR %s' % remote_file_name, input_handle)
+        conn.storbinary(f'STOR {remote_file_name}', input_handle)
 
         if is_path:
             input_handle.close()
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index b7f46d9..0b3c3b3 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -2843,7 +2843,7 @@ def _split_tablename(
     cmpt = rest.split('.')
     if len(cmpt) == 3:
         if project_id:
-            raise ValueError("{var}Use either : or . to specify project".format(var=var_print(var_name)))
+            raise ValueError(f"{var_print(var_name)}Use either : or . to specify project")
         project_id = cmpt[0]
         dataset_id = cmpt[1]
         table_id = cmpt[2]
@@ -2887,7 +2887,7 @@ def _cleanse_time_partitioning(
 def _validate_value(key: Any, value: Any, expected_type: Type) -> None:
     """Function to check expected type and raise error if type is not correct"""
     if not isinstance(value, expected_type):
-        raise TypeError("{} argument must have a type {} not {}".format(key, expected_type, type(value)))
+        raise TypeError(f"{key} argument must have a type {expected_type} not {type(value)}")
 
 
 def _api_resource_configs_duplication_check(
diff --git a/airflow/providers/google/cloud/hooks/compute.py b/airflow/providers/google/cloud/hooks/compute.py
index ab84241..c4da00a 100644
--- a/airflow/providers/google/cloud/hooks/compute.py
+++ b/airflow/providers/google/cloud/hooks/compute.py
@@ -99,9 +99,7 @@ class ComputeEngineHook(GoogleBaseHook):
         try:
             operation_name = response["name"]
         except KeyError:
-            raise AirflowException(
-                "Wrong response '{}' returned - it should contain " "'name' field".format(response)
-            )
+            raise AirflowException(f"Wrong response '{response}' returned - it should contain 'name' field")
         self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -130,9 +128,7 @@ class ComputeEngineHook(GoogleBaseHook):
         try:
             operation_name = response["name"]
         except KeyError:
-            raise AirflowException(
-                "Wrong response '{}' returned - it should contain " "'name' field".format(response)
-            )
+            raise AirflowException(f"Wrong response '{response}' returned - it should contain 'name' field")
         self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -159,9 +155,7 @@ class ComputeEngineHook(GoogleBaseHook):
         try:
             operation_name = response["name"]
         except KeyError:
-            raise AirflowException(
-                "Wrong response '{}' returned - it should contain " "'name' field".format(response)
-            )
+            raise AirflowException(f"Wrong response '{response}' returned - it should contain 'name' field")
         self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone)
 
     def _execute_set_machine_type(self, zone: str, resource_id: str, body: dict, project_id: str) -> dict:
@@ -233,9 +227,7 @@ class ComputeEngineHook(GoogleBaseHook):
         try:
             operation_name = response["name"]
         except KeyError:
-            raise AirflowException(
-                "Wrong response '{}' returned - it should contain " "'name' field".format(response)
-            )
+            raise AirflowException(f"Wrong response '{response}' returned - it should contain 'name' field")
         self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name)
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -318,9 +310,7 @@ class ComputeEngineHook(GoogleBaseHook):
         try:
             operation_name = response["name"]
         except KeyError:
-            raise AirflowException(
-                "Wrong response '{}' returned - it should contain " "'name' field".format(response)
-            )
+            raise AirflowException(f"Wrong response '{response}' returned - it should contain 'name' field")
         self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone)
 
     def _wait_for_operation_to_complete(
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py
index 0a665d4..da3e49c 100644
--- a/airflow/providers/google/cloud/hooks/dataflow.py
+++ b/airflow/providers/google/cloud/hooks/dataflow.py
@@ -385,21 +385,19 @@ class _DataflowJobsController(LoggingMixin):
         if job['currentState'] == DataflowJobStatus.JOB_STATE_DONE:
             return True
         elif job['currentState'] == DataflowJobStatus.JOB_STATE_FAILED:
-            raise Exception("Google Cloud Dataflow job {} has failed.".format(job['name']))
+            raise Exception(f"Google Cloud Dataflow job {job['name']} has failed.")
         elif job['currentState'] == DataflowJobStatus.JOB_STATE_CANCELLED:
-            raise Exception("Google Cloud Dataflow job {} was cancelled.".format(job['name']))
+            raise Exception(f"Google Cloud Dataflow job {job['name']} was cancelled.")
         elif job['currentState'] == DataflowJobStatus.JOB_STATE_DRAINED:
-            raise Exception("Google Cloud Dataflow job {} was drained.".format(job['name']))
+            raise Exception(f"Google Cloud Dataflow job {job['name']} was drained.")
         elif job['currentState'] == DataflowJobStatus.JOB_STATE_UPDATED:
-            raise Exception("Google Cloud Dataflow job {} was updated.".format(job['name']))
+            raise Exception(f"Google Cloud Dataflow job {job['name']} was updated.")
         elif job['currentState'] == DataflowJobStatus.JOB_STATE_RUNNING and wait_for_running:
             return True
         elif job['currentState'] in DataflowJobStatus.AWAITING_STATES:
             return self._wait_until_finished is False
         self.log.debug("Current job: %s", str(job))
-        raise Exception(
-            "Google Cloud Dataflow job {} was unknown state: {}".format(job["name"], job["currentState"])
-        )
+        raise Exception(f"Google Cloud Dataflow job {job['name']} was unknown state: {job['currentState']}")
 
     def wait_for_done(self) -> None:
         """Helper method to wait for result of submitted job."""
diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py
index 8cb93df..49a6e2c 100644
--- a/airflow/providers/google/cloud/hooks/functions.py
+++ b/airflow/providers/google/cloud/hooks/functions.py
@@ -216,9 +216,7 @@ class CloudFunctionsHook(GoogleBaseHook):
         :type project_id: str
         :return: None
         """
-        name = "projects/{project_id}/locations/{location}/functions/{function_id}".format(
-            project_id=project_id, location=location, function_id=function_id
-        )
+        name = f"projects/{project_id}/locations/{location}/functions/{function_id}"
         # fmt: off
         response = self.get_conn().projects().locations().functions().call(  # pylint: disable=no-member
             name=name,
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
index 930c1cd..604815d 100644
--- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
@@ -99,7 +99,7 @@ class GKEHook(GoogleBaseHook):
             if operation.status == Operation.Status.RUNNING or operation.status == Operation.Status.PENDING:
                 time.sleep(OPERATIONAL_POLL_INTERVAL)
             else:
-                raise exceptions.GoogleCloudError("Operation has failed with status: %s" % operation.status)
+                raise exceptions.GoogleCloudError(f"Operation has failed with status: {operation.status}")
             # To update status of operation
             operation = self.get_operation(operation.name, project_id=project_id or self.project_id)
         return operation
diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py
index 9fd456d..b57a18a 100644
--- a/airflow/providers/google/cloud/log/gcs_task_handler.py
+++ b/airflow/providers/google/cloud/log/gcs_task_handler.py
@@ -156,7 +156,7 @@ class GCSTaskHandler(FileTaskHandler, LoggingMixin):
             log = f'*** Reading remote log from {remote_loc}.\n{remote_log}\n'
             return log, {'end_of_log': True}
         except Exception as e:  # pylint: disable=broad-except
-            log = '*** Unable to read remote log from {}\n*** {}\n\n'.format(remote_loc, str(e))
+            log = f'*** Unable to read remote log from {remote_loc}\n*** {str(e)}\n\n'
             self.log.error(log)
             local_log, metadata = super()._read(ti, try_number)
             log += local_log
@@ -178,7 +178,7 @@ class GCSTaskHandler(FileTaskHandler, LoggingMixin):
             log = '\n'.join([old_log, log]) if old_log else log
         except Exception as e:  # pylint: disable=broad-except
             if not hasattr(e, 'resp') or e.resp.get('status') != '404':  # pylint: disable=no-member
-                log = '*** Previous log discarded: {}\n\n'.format(str(e)) + log
+                log = f'*** Previous log discarded: {str(e)}\n\n' + log
                 self.log.info("Previous log discarded: %s", e)
 
         try:
diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py
index ea2c9d3..28956ec 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/airflow/providers/google/cloud/operators/bigquery.py
@@ -729,9 +729,7 @@ class BigQueryExecuteQueryOperator(BaseOperator):
                 for s in self.sql
             ]
         else:
-            raise AirflowException(
-                "argument 'sql' of type {} is neither a string nor an iterable".format(type(str))
-            )
+            raise AirflowException(f"argument 'sql' of type {type(str)} is neither a string nor an iterable")
         context['task_instance'].xcom_push(key='job_id', value=job_id)
 
     def on_kill(self) -> None:
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index ac93915..3bcb0aa 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -1455,7 +1455,7 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
     @staticmethod
     def _generate_temp_filename(filename):
         date = time.strftime('%Y%m%d%H%M%S')
-        return "{}_{}_{}".format(date, str(uuid.uuid4())[:8], ntpath.basename(filename))
+        return f"{date}_{str(uuid.uuid4())[:8]}_{ntpath.basename(filename)}"
 
     def _upload_file_temp(self, bucket, local_file):
         """Upload a local file to a Google Cloud Storage bucket."""
diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
index 69d0f63..91b8ca9 100644
--- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py
+++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -54,7 +54,7 @@ def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Optio
         response_headers = response.headers
         if response_body is None:
             raise jenkins.EmptyResponseException(
-                "Error communicating with server[%s]: empty response" % jenkins_server.server
+                f"Error communicating with server[{jenkins_server.server}]: empty response"
             )
         return {'body': response_body.decode('utf-8'), 'headers': response_headers}
     except HTTPError as e:
@@ -66,9 +66,9 @@ def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Optio
         else:
             raise
     except socket.timeout as e:
-        raise jenkins.TimeoutException('Error in request: %s' % e)
+        raise jenkins.TimeoutException(f'Error in request: {e}')
     except URLError as e:
-        raise JenkinsException('Error in request: %s' % e.reason)
+        raise JenkinsException(f'Error in request: {e.reason}')
     return None
 
 
diff --git a/airflow/providers/jira/hooks/jira.py b/airflow/providers/jira/hooks/jira.py
index 8951c12..5d186b8 100644
--- a/airflow/providers/jira/hooks/jira.py
+++ b/airflow/providers/jira/hooks/jira.py
@@ -82,8 +82,8 @@ class JiraHook(BaseHook):
                     proxies=self.proxies,
                 )
             except JIRAError as jira_error:
-                raise AirflowException('Failed to create jira client, jira error: %s' % str(jira_error))
+                raise AirflowException(f'Failed to create jira client, jira error: {str(jira_error)}')
             except Exception as e:
-                raise AirflowException('Failed to create jira client, error: %s' % str(e))
+                raise AirflowException(f'Failed to create jira client, error: {str(e)}')
 
         return self.client
diff --git a/airflow/providers/jira/operators/jira.py b/airflow/providers/jira/operators/jira.py
index 3550d1f..1e9530c 100644
--- a/airflow/providers/jira/operators/jira.py
+++ b/airflow/providers/jira/operators/jira.py
@@ -89,6 +89,6 @@ class JiraOperator(BaseOperator):
             return jira_result
 
         except JIRAError as jira_error:
-            raise AirflowException("Failed to execute jiraOperator, error: %s" % str(jira_error))
+            raise AirflowException(f"Failed to execute jiraOperator, error: {str(jira_error)}")
         except Exception as e:
-            raise AirflowException("Jira operator error: %s" % str(e))
+            raise AirflowException(f"Jira operator error: {str(e)}")
diff --git a/airflow/providers/microsoft/azure/operators/azure_container_instances.py b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
index 74418ac..70355ed 100644
--- a/airflow/providers/microsoft/azure/operators/azure_container_instances.py
+++ b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
@@ -278,7 +278,7 @@ class AzureContainerInstancesOperator(BaseOperator):
 
             self.log.info("Container had exit code: %s", exit_code)
             if exit_code != 0:
-                raise AirflowException("Container had a non-zero exit code, %s" % exit_code)
+                raise AirflowException(f"Container had a non-zero exit code, {exit_code}")
             return exit_code
 
         except CloudError:
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/airflow/providers/microsoft/winrm/operators/winrm.py
index 5500e3d..55d65cf 100644
--- a/airflow/providers/microsoft/winrm/operators/winrm.py
+++ b/airflow/providers/microsoft/winrm/operators/winrm.py
@@ -129,7 +129,7 @@ class WinRMOperator(BaseOperator):
             self.winrm_hook.winrm_protocol.close_shell(winrm_client)  # type: ignore[attr-defined]
 
         except Exception as e:
-            raise AirflowException("WinRM operator error: {}".format(str(e)))
+            raise AirflowException(f"WinRM operator error: {str(e)}")
 
         if return_code == 0:
             # returning output if do_xcom_push is set
diff --git a/airflow/providers/mysql/hooks/mysql.py b/airflow/providers/mysql/hooks/mysql.py
index 5da0be2..7d1f71f 100644
--- a/airflow/providers/mysql/hooks/mysql.py
+++ b/airflow/providers/mysql/hooks/mysql.py
@@ -164,12 +164,10 @@ class MySqlHook(DbApiHook):
         conn = self.get_conn()
         cur = conn.cursor()
         cur.execute(
-            """
+            f"""
             LOAD DATA LOCAL INFILE '{tmp_file}'
             INTO TABLE {table}
-            """.format(
-                tmp_file=tmp_file, table=table
-            )
+            """
         )
         conn.commit()
 
@@ -178,12 +176,10 @@ class MySqlHook(DbApiHook):
         conn = self.get_conn()
         cur = conn.cursor()
         cur.execute(
-            """
+            f"""
             SELECT * INTO OUTFILE '{tmp_file}'
             FROM {table}
-            """.format(
-                tmp_file=tmp_file, table=table
-            )
+            """
         )
         conn.commit()
 
@@ -251,17 +247,12 @@ class MySqlHook(DbApiHook):
         cursor = conn.cursor()
 
         cursor.execute(
-            """
+            f"""
             LOAD DATA LOCAL INFILE '{tmp_file}'
             {duplicate_key_handling}
             INTO TABLE {table}
             {extra_options}
-            """.format(
-                tmp_file=tmp_file,
-                table=table,
-                duplicate_key_handling=duplicate_key_handling,
-                extra_options=extra_options,
-            )
+            """
         )
 
         cursor.close()
diff --git a/airflow/providers/opsgenie/hooks/opsgenie_alert.py b/airflow/providers/opsgenie/hooks/opsgenie_alert.py
index aad834c..60f1734 100644
--- a/airflow/providers/opsgenie/hooks/opsgenie_alert.py
+++ b/airflow/providers/opsgenie/hooks/opsgenie_alert.py
@@ -82,5 +82,5 @@ class OpsgenieAlertHook(HttpHook):
         return self.run(
             endpoint='v2/alerts',
             data=json.dumps(payload),
-            headers={'Content-Type': 'application/json', 'Authorization': 'GenieKey %s' % api_key},
+            headers={'Content-Type': 'application/json', 'Authorization': f'GenieKey {api_key}'},
         )
diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py
index bfb40a4..1c5c536 100644
--- a/airflow/providers/oracle/hooks/oracle.py
+++ b/airflow/providers/oracle/hooks/oracle.py
@@ -178,7 +178,7 @@ class OracleHook(DbApiHook):
                 else:
                     lst.append(str(cell))
             values = tuple(lst)
-            sql = 'INSERT /*+ APPEND */ INTO {} {} VALUES ({})'.format(table, target_fields, ','.join(values))
+            sql = f"INSERT /*+ APPEND */ INTO {table} {target_fields} VALUES ({','.join(values)})"
             cur.execute(sql)
             if i % commit_every == 0:
                 conn.commit()  # type: ignore[attr-defined]
diff --git a/airflow/providers/pagerduty/hooks/pagerduty.py b/airflow/providers/pagerduty/hooks/pagerduty.py
index 4de43df..e42357a 100644
--- a/airflow/providers/pagerduty/hooks/pagerduty.py
+++ b/airflow/providers/pagerduty/hooks/pagerduty.py
@@ -145,7 +145,7 @@ class PagerdutyHook(BaseHook):
 
         actions = ('trigger', 'acknowledge', 'resolve')
         if action not in actions:
-            raise ValueError("Event action must be one of: %s" % ', '.join(actions))
+            raise ValueError(f"Event action must be one of: {', '.join(actions)}")
         data = {
             "event_action": action,
             "payload": payload,
diff --git a/airflow/providers/plexus/operators/job.py b/airflow/providers/plexus/operators/job.py
index ece5df6..8f56987 100644
--- a/airflow/providers/plexus/operators/job.py
+++ b/airflow/providers/plexus/operators/job.py
@@ -149,9 +149,7 @@ class PlexusJobOperator(BaseOperator):
         """
         missing_params = self.required_params - set(self.job_params)
         if len(missing_params) > 0:
-            raise AirflowException(
-                "Missing the following required job_params: {}".format(", ".join(missing_params))
-            )
+            raise AirflowException(f"Missing the following required job_params: {', '.join(missing_params)}")
         params = {}
         for prm in self.job_params:
             if prm in self.lookups:
diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py
index 0f38823..cd80bd9 100644
--- a/airflow/providers/postgres/hooks/postgres.py
+++ b/airflow/providers/postgres/hooks/postgres.py
@@ -223,7 +223,7 @@ class PostgresHook(DbApiHook):
         else:
             target_fields_fragment = ''
 
-        sql = "INSERT INTO {} {} VALUES ({})".format(table, target_fields_fragment, ",".join(placeholders))
+        sql = f"INSERT INTO {table} {target_fields_fragment} VALUES ({','.join(placeholders)})"
 
         if replace:
             if target_fields is None:
diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py
index 0cc3832..6655de5 100644
--- a/airflow/providers/qubole/hooks/qubole.py
+++ b/airflow/providers/qubole/hooks/qubole.py
@@ -262,7 +262,7 @@ class QuboleHook(BaseHook):
         for key, value in self.kwargs.items():  # pylint: disable=too-many-nested-blocks
             if key in COMMAND_ARGS[cmd_type]:
                 if key in HYPHEN_ARGS:
-                    args.append("--{}={}".format(key.replace('_', '-'), value))
+                    args.append(f"--{key.replace('_', '-')}={value}")
                 elif key in positional_args_list:
                     inplace_args = value
                 elif key == 'tags':
@@ -273,7 +273,7 @@ class QuboleHook(BaseHook):
                 else:
                     args.append(f"--{key}={value}")
 
-        args.append("--tags={}".format(','.join(filter(None, tags))))
+        args.append(f"--tags={','.join(filter(None, tags))}")
 
         if inplace_args is not None:
             args += inplace_args.split(' ')
diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py
index aeaef5d..d76baac 100644
--- a/airflow/providers/salesforce/hooks/salesforce.py
+++ b/airflow/providers/salesforce/hooks/salesforce.py
@@ -146,7 +146,7 @@ class SalesforceHook(BaseHook):
         :return: all instances of the object from Salesforce.
         :rtype: dict
         """
-        query = "SELECT {} FROM {}".format(",".join(fields), obj)
+        query = f"SELECT {','.join(fields)} FROM {obj}"
 
         self.log.info(
             "Making query to Salesforce: %s",
diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py
index 39cc14b..f137352 100644
--- a/airflow/providers/sftp/operators/sftp.py
+++ b/airflow/providers/sftp/operators/sftp.py
@@ -154,7 +154,7 @@ class SFTPOperator(BaseOperator):
                     sftp_client.put(self.local_filepath, self.remote_filepath, confirm=self.confirm)
 
         except Exception as e:
-            raise AirflowException("Error while transferring {}, error: {}".format(file_msg, str(e)))
+            raise AirflowException(f"Error while transferring {file_msg}, error: {str(e)}")
 
         return self.local_filepath
 
diff --git a/airflow/providers/ssh/operators/ssh.py b/airflow/providers/ssh/operators/ssh.py
index 14cec96..32ac1f2 100644
--- a/airflow/providers/ssh/operators/ssh.py
+++ b/airflow/providers/ssh/operators/ssh.py
@@ -168,7 +168,7 @@ class SSHOperator(BaseOperator):
                     raise AirflowException(f"error running cmd: {self.command}, error: {error_msg}")
 
         except Exception as e:
-            raise AirflowException("SSH operator error: {}".format(str(e)))
+            raise AirflowException(f"SSH operator error: {str(e)}")
 
         return True
 
diff --git a/airflow/security/kerberos.py b/airflow/security/kerberos.py
index 87c269d..f1ddfdf 100644
--- a/airflow/security/kerberos.py
+++ b/airflow/security/kerberos.py
@@ -56,7 +56,7 @@ def renew_from_kt(principal: str, keytab: str, exit_on_fail: bool = True):
     """
     # The config is specified in seconds. But we ask for that same amount in
     # minutes to give ourselves a large renewal buffer.
-    renewal_lifetime = "%sm" % conf.getint('kerberos', 'reinit_frequency')
+    renewal_lifetime = f"{conf.getint('kerberos', 'reinit_frequency')}m"
 
     cmd_principal = principal or conf.get('kerberos', 'principal').replace("_HOST", socket.getfqdn())
 
@@ -128,7 +128,7 @@ def perform_krb181_workaround(principal: str):
     ret = subprocess.call(cmdv, close_fds=True)
 
     if ret != 0:
-        principal = "{}/{}".format(principal or conf.get('kerberos', 'principal'), socket.getfqdn())
+        principal = f"{principal or conf.get('kerberos', 'principal')}/{socket.getfqdn()}"
         princ = principal
         ccache = conf.get('kerberos', 'principal')
         log.error(
diff --git a/airflow/security/utils.py b/airflow/security/utils.py
index e5ceadb..ca203b5 100644
--- a/airflow/security/utils.py
+++ b/airflow/security/utils.py
@@ -55,7 +55,7 @@ def replace_hostname_pattern(components, host=None):
     fqdn = host
     if not fqdn or fqdn == '0.0.0.0':
         fqdn = get_hostname()
-    return '{}/{}@{}'.format(components[0], fqdn.lower(), components[2])
+    return f'{components[0]}/{fqdn.lower()}@{components[2]}'
 
 
 def get_fqdn(hostname_or_ip=None):
diff --git a/airflow/sensors/date_time.py b/airflow/sensors/date_time.py
index 9e997bc..4a6f11b 100644
--- a/airflow/sensors/date_time.py
+++ b/airflow/sensors/date_time.py
@@ -65,7 +65,7 @@ class DateTimeSensor(BaseSensorOperator):
             self.target_time = target_time
         else:
             raise TypeError(
-                "Expected str or datetime.datetime type for target_time. Got {}".format(type(target_time))
+                f"Expected str or datetime.datetime type for target_time. Got {type(target_time)}"
             )
 
     def poke(self, context: Dict) -> bool:
diff --git a/airflow/sensors/sql.py b/airflow/sensors/sql.py
index 573c7cd..923af6c 100644
--- a/airflow/sensors/sql.py
+++ b/airflow/sensors/sql.py
@@ -89,7 +89,7 @@ class SqlSensor(BaseSensorOperator):
         if conn.conn_type not in allowed_conn_type:
             raise AirflowException(
                 "The connection type is not supported by SqlSensor. "
-                + "Supported connection types: {}".format(list(allowed_conn_type))
+                + f"Supported connection types: {list(allowed_conn_type)}"
             )
         return conn.get_hook()
 
diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py
index 68a0b44..600a693 100644
--- a/airflow/utils/cli.py
+++ b/airflow/utils/cli.py
@@ -265,7 +265,7 @@ def sigquit_handler(sig, frame):  # pylint: disable=unused-argument
     id_to_name = {th.ident: th.name for th in threading.enumerate()}
     code = []
     for thread_id, stack in sys._current_frames().items():  # pylint: disable=protected-access
-        code.append("\n# Thread: {}({})".format(id_to_name.get(thread_id, ""), thread_id))
+        code.append(f"\n# Thread: {id_to_name.get(thread_id, '')}({thread_id})")
         for filename, line_number, name, line in traceback.extract_stack(stack):
             code.append(f'File: "{filename}", line {line_number}, in {name}')
             if line:
diff --git a/airflow/utils/code_utils.py b/airflow/utils/code_utils.py
index 77cfa42..53b2db1 100644
--- a/airflow/utils/code_utils.py
+++ b/airflow/utils/code_utils.py
@@ -50,7 +50,7 @@ def get_python_source(x: Any) -> Optional[str]:
             pass
 
     if source_code is None:
-        source_code = 'No source code available for {}'.format(type(x))
+        source_code = f'No source code available for {type(x)}'
     return source_code
 
 
diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py
index 6f04cbf..7617bda 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -120,7 +120,7 @@ class FileTaskHandler(logging.Handler):
                     log += "".join(file.readlines())
             except Exception as e:  # pylint: disable=broad-except
                 log = f"*** Failed to load local log file: {location}\n"
-                log += "*** {}\n".format(str(e))
+                log += f"*** {str(e)}\n"
         elif conf.get('core', 'executor') == 'KubernetesExecutor':  # pylint: disable=too-many-nested-blocks
             try:
                 from airflow.kubernetes.kube_client import get_kube_client
@@ -158,7 +158,7 @@ class FileTaskHandler(logging.Handler):
                     log += line.decode()
 
             except Exception as f:  # pylint: disable=broad-except
-                log += '*** Unable to fetch logs from worker pod {} ***\n{}\n\n'.format(ti.hostname, str(f))
+                log += f'*** Unable to fetch logs from worker pod {ti.hostname} ***\n{str(f)}\n\n'
         else:
             url = os.path.join("http://{ti.hostname}:{worker_log_server_port}/log", log_relative_path).format(
                 ti=ti, worker_log_server_port=conf.get('celery', 'WORKER_LOG_SERVER_PORT')
@@ -180,7 +180,7 @@ class FileTaskHandler(logging.Handler):
 
                 log += '\n' + response.text
             except Exception as e:  # pylint: disable=broad-except
-                log += "*** Failed to fetch log file from worker. {}\n".format(str(e))
+                log += f"*** Failed to fetch log file from worker. {str(e)}\n"
 
         return log, {'end_of_log': True}
 
diff --git a/airflow/utils/timezone.py b/airflow/utils/timezone.py
index d302cbe..2b2521a 100644
--- a/airflow/utils/timezone.py
+++ b/airflow/utils/timezone.py
@@ -109,7 +109,7 @@ def make_aware(value, timezone=None):
 
     # Check that we won't overwrite the timezone of an aware datetime.
     if is_localized(value):
-        raise ValueError("make_aware expects a naive datetime, got %s" % value)
+        raise ValueError(f"make_aware expects a naive datetime, got {value}")
     if hasattr(value, 'fold'):
         # In case of python 3.6 we want to do the same that pendulum does for python3.5
         # i.e in case we move clock back we want to schedule the run at the time of the second
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index 265a12f..09ebe9a 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -128,14 +128,14 @@ def generate_pages(current_page, num_of_pages, search=None, status=None, window=
     is_disabled = 'disabled' if current_page <= 0 else ''
     output.append(
         first_node.format(
-            href_link="?{}".format(get_params(page=0, search=search, status=status)),  # noqa
+            href_link=f"?{get_params(page=0, search=search, status=status)}",  # noqa
             disabled=is_disabled,
         )
     )
 
     page_link = void_link
     if current_page > 0:
-        page_link = '?{}'.format(get_params(page=(current_page - 1), search=search, status=status))
+        page_link = f'?{get_params(page=current_page - 1, search=search, status=status)}'
 
     output.append(previous_node.format(href_link=page_link, disabled=is_disabled))  # noqa
 
@@ -157,7 +157,7 @@ def generate_pages(current_page, num_of_pages, search=None, status=None, window=
             'is_active': 'active' if is_current(current_page, page) else '',
             'href_link': void_link
             if is_current(current_page, page)
-            else '?{}'.format(get_params(page=page, search=search, status=status)),
+            else f'?{get_params(page=page, search=search, status=status)}',
             'page_num': page + 1,
         }
         output.append(page_node.format(**vals))  # noqa
@@ -167,13 +167,13 @@ def generate_pages(current_page, num_of_pages, search=None, status=None, window=
     page_link = (
         void_link
         if current_page >= num_of_pages - 1
-        else '?{}'.format(get_params(page=current_page + 1, search=search, status=status))
+        else f'?{get_params(page=current_page + 1, search=search, status=status)}'
     )
 
     output.append(next_node.format(href_link=page_link, disabled=is_disabled))  # noqa
     output.append(
         last_node.format(
-            href_link="?{}".format(get_params(page=last_page, search=search, status=status)),  # noqa
+            href_link=f"?{get_params(page=last_page, search=search, status=status)}",  # noqa
             disabled=is_disabled,
         )
     )
diff --git a/airflow/www/validators.py b/airflow/www/validators.py
index 9699a47..fe35dcb 100644
--- a/airflow/www/validators.py
+++ b/airflow/www/validators.py
@@ -37,7 +37,7 @@ class GreaterEqualThan(EqualTo):
         try:
             other = form[self.fieldname]
         except KeyError:
-            raise ValidationError(field.gettext("Invalid field name '%s'." % self.fieldname))
+            raise ValidationError(field.gettext(f"Invalid field name '{self.fieldname}'."))
 
         if field.data is None or other.data is None:
             return
@@ -50,7 +50,7 @@ class GreaterEqualThan(EqualTo):
             message = self.message
             if message is None:
                 message = field.gettext(
-                    'Field must be greater than or equal to %(other_label)s.' % message_args
+                    f"Field must be greater than or equal to {message_args['other_label']}."
                 )
             else:
                 message = message % message_args
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 1c00951..93ffc38 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1639,7 +1639,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
         new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed)
 
         if confirmed:
-            flash('Marked failed on {} task instances'.format(len(new_dag_state)))
+            flash(f'Marked failed on {len(new_dag_state)} task instances')
             return redirect(origin)
 
         else:
@@ -1668,7 +1668,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
         new_dag_state = set_dag_run_state_to_success(dag, execution_date, commit=confirmed)
 
         if confirmed:
-            flash('Marked success on {} task instances'.format(len(new_dag_state)))
+            flash(f'Marked success on {len(new_dag_state)} task instances')
             return redirect(origin)
 
         else:
@@ -1752,7 +1752,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
                 commit=True,
             )
 
-            flash("Marked {} on {} task instances".format(state, len(altered)))
+            flash(f"Marked {state} on {len(altered)} task instances")
             return redirect(origin)
 
         to_be_altered = set_state(
@@ -3410,10 +3410,7 @@ class DagRunModelView(AirflowModelView):
                 cleared_ti_count += len(tis)
                 models.clear_task_instances(tis, session, dag=dag)
 
-            flash(
-                "{count} dag runs and {altered_ti_count} task instances "
-                "were cleared".format(count=count, altered_ti_count=cleared_ti_count)
-            )
+            flash(f"{count} dag runs and {cleared_ti_count} task instances were cleared")
         except Exception:  # noqa pylint: disable=broad-except
             flash('Failed to clear state', 'error')
         return redirect(self.get_default_url())
@@ -3636,7 +3633,7 @@ class TaskInstanceModelView(AirflowModelView):
                 models.clear_task_instances(task_instances_list, session, dag=dag)
 
             session.commit()
-            flash("{} task instances have been cleared".format(len(task_instances)))
+            flash(f"{len(task_instances)} task instances have been cleared")
             self.update_redirect()
             return redirect(self.get_redirect())
         except Exception as e:  # noqa pylint: disable=broad-except
@@ -3651,11 +3648,7 @@ class TaskInstanceModelView(AirflowModelView):
             for ti in tis:
                 ti.set_state(target_state, session)
             session.commit()
-            flash(
-                "{count} task instances were set to '{target_state}'".format(
-                    count=count, target_state=target_state
-                )
-            )
+            flash(f"{count} task instances were set to '{target_state}'")
         except Exception:  # noqa pylint: disable=broad-except
             flash('Failed to set state', 'error')
 
diff --git a/breeze-complete b/breeze-complete
index 63b34ad..5562cee 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -97,6 +97,7 @@ dont-use-safe-filter
 end-of-file-fixer
 fix-encoding-pragma
 flake8
+flynt
 forbid-tabs
 helm-lint
 identity
diff --git a/docs/exts/airflow_intersphinx.py b/docs/exts/airflow_intersphinx.py
index ee83b8f..a3bd262 100644
--- a/docs/exts/airflow_intersphinx.py
+++ b/docs/exts/airflow_intersphinx.py
@@ -150,7 +150,7 @@ if __name__ == "__main__":
             except ValueError as exc:
                 print(exc.args[0] % exc.args[1:])
             except Exception as exc:  # pylint: disable=broad-except
-                print('Unknown error: %r' % exc)
+                print(f'Unknown error: {exc!r}')
 
         provider_mapping = _generate_provider_intersphinx_mapping()
 
diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py
index 141ca82..815eb21 100644
--- a/docs/exts/exampleinclude.py
+++ b/docs/exts/exampleinclude.py
@@ -140,7 +140,7 @@ def register_source(app, env, modname):
     """
     entry = env._viewcode_modules.get(modname, None)
     if entry is False:
-        print("[%s] Entry is false for " % modname)
+        print(f"[{modname}] Entry is false for ")
         return False
 
     code_tags = app.emit_firstresult("viewcode-find-source", modname)
diff --git a/docs/exts/redirects.py b/docs/exts/redirects.py
index 7874124..20603c4 100644
--- a/docs/exts/redirects.py
+++ b/docs/exts/redirects.py
@@ -54,7 +54,7 @@ def generate_redirects(app):
             from_path = from_path.replace(in_suffix, '.html')
             to_path = to_path.replace(in_suffix, ".html")
 
-            to_path_prefix = "..%s" % os.path.sep * (len(from_path.split(os.path.sep)) - 1)
+            to_path_prefix = f"..{os.path.sep}" * (len(from_path.split(os.path.sep)) - 1)
             to_path = to_path_prefix + to_path
 
             log.debug("Resolved redirect '%s' to '%s'", from_path, to_path)
diff --git a/metastore_browser/hive_metastore.py b/metastore_browser/hive_metastore.py
index 58d4da3..462f245 100644
--- a/metastore_browser/hive_metastore.py
+++ b/metastore_browser/hive_metastore.py
@@ -95,7 +95,7 @@ class MetastoreBrowserView(BaseView):
     def partitions(self):
         """Retrieve table partitions"""
         schema, table = request.args.get("table").split('.')
-        sql = """
+        sql = f"""
         SELECT
             a.PART_NAME,
             a.CREATE_TIME,
@@ -111,9 +111,7 @@ class MetastoreBrowserView(BaseView):
             b.TBL_NAME like '{table}' AND
             d.NAME like '{schema}'
         ORDER BY PART_NAME DESC
-        """.format(
-            table=table, schema=schema
-        )
+        """
         hook = MySqlHook(METASTORE_MYSQL_CONN_ID)
         df = hook.get_pandas_df(sql)
         return df.to_html(
@@ -133,7 +131,7 @@ class MetastoreBrowserView(BaseView):
         if DB_DENY_LIST:
             dbs = ",".join(["'" + db + "'" for db in DB_DENY_LIST])
             where_clause = f"AND b.name NOT IN ({dbs})"
-        sql = """
+        sql = f"""
         SELECT CONCAT(b.NAME, '.', a.TBL_NAME), TBL_TYPE
         FROM TBLS a
         JOIN DBS b ON a.DB_ID = b.DB_ID
@@ -143,10 +141,8 @@ class MetastoreBrowserView(BaseView):
             b.NAME NOT LIKE '%tmp%' AND
             b.NAME NOT LIKE '%temp%'
         {where_clause}
-        LIMIT {LIMIT};
-        """.format(
-            where_clause=where_clause, LIMIT=TABLE_SELECTOR_LIMIT
-        )
+        LIMIT {TABLE_SELECTOR_LIMIT};
+        """
         hook = MySqlHook(METASTORE_MYSQL_CONN_ID)
         data = [{'id': row[0], 'text': row[0]} for row in hook.get_records(sql)]
         return json.dumps(data)
diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
index 038e027..688f767 100755
--- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
+++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py
@@ -98,16 +98,16 @@ def assert_sets_equal(set1, set2):
     try:
         difference1 = set1.difference(set2)
     except TypeError as e:
-        raise AssertionError('invalid type when attempting set difference: %s' % e)
+        raise AssertionError(f'invalid type when attempting set difference: {e}')
     except AttributeError as e:
-        raise AssertionError('first argument does not support set difference: %s' % e)
+        raise AssertionError(f'first argument does not support set difference: {e}')
 
     try:
         difference2 = set2.difference(set1)
     except TypeError as e:
-        raise AssertionError('invalid type when attempting set difference: %s' % e)
+        raise AssertionError(f'invalid type when attempting set difference: {e}')
     except AttributeError as e:
-        raise AssertionError('second argument does not support set difference: %s' % e)
+        raise AssertionError(f'second argument does not support set difference: {e}')
 
     if not (difference1 or difference2):
         return
diff --git a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
index 6558901..0480286 100755
--- a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
+++ b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
@@ -121,7 +121,7 @@ def _write_option(configfile, idx, option):
     if option["example"]:
         if not str(option["name"]).endswith("_template"):
             option["example"] = option["example"].replace("{", "{{").replace("}", "}}")
-        configfile.write("# Example: {} = {}\n".format(option["name"], option["example"]))
+        configfile.write(f"# Example: {option['name']} = {option['example']}\n")
 
     if option["default"] is not None:
         if not isinstance(option["default"], str):
@@ -134,9 +134,9 @@ def _write_option(configfile, idx, option):
             value = " " + option["default"]
         else:
             value = ""
-        configfile.write("{} ={}\n".format(option["name"], value))
+        configfile.write(f"{option['name']} ={value}\n")
     else:
-        configfile.write("# {} =\n".format(option["name"]))
+        configfile.write(f"# {option['name']} =\n")
 
 
 if __name__ == '__main__':
diff --git a/tests/api/common/experimental/test_pool.py b/tests/api/common/experimental/test_pool.py
index ae00226..3c75a14 100644
--- a/tests/api/common/experimental/test_pool.py
+++ b/tests/api/common/experimental/test_pool.py
@@ -39,7 +39,7 @@ class TestPool(unittest.TestCase):
         clear_db_pools()
         self.pools = [Pool.get_default_pool()]
         for i in range(self.USER_POOL_COUNT):
-            name = 'experimental_%s' % (i + 1)
+            name = f'experimental_{i + 1}'
             pool = models.Pool(
                 pool=name,
                 slots=i,
diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py
index ae78892..c81ff81 100644
--- a/tests/cli/commands/test_connection_command.py
+++ b/tests/cli/commands/test_connection_command.py
@@ -496,7 +496,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "connections",
                     "add",
                     "new0",
-                    "--conn-uri=%s" % TEST_URL,
+                    f"--conn-uri={TEST_URL}",
                     "--conn-description=new0 description",
                 ],
                 "Successfully added `conn_id`=new0 : postgresql://airflow:airflow@host:5432/airflow",
@@ -516,7 +516,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "connections",
                     "add",
                     "new1",
-                    "--conn-uri=%s" % TEST_URL,
+                    f"--conn-uri={TEST_URL}",
                     "--conn-description=new1 description",
                 ],
                 "Successfully added `conn_id`=new1 : postgresql://airflow:airflow@host:5432/airflow",
@@ -536,7 +536,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "connections",
                     "add",
                     "new2",
-                    "--conn-uri=%s" % TEST_URL,
+                    f"--conn-uri={TEST_URL}",
                     "--conn-extra",
                     "{'extra': 'yes'}",
                 ],
@@ -557,7 +557,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "connections",
                     "add",
                     "new3",
-                    "--conn-uri=%s" % TEST_URL,
+                    f"--conn-uri={TEST_URL}",
                     "--conn-extra",
                     "{'extra': 'yes'}",
                     "--conn-description",
@@ -651,12 +651,12 @@ class TestCliAddConnections(unittest.TestCase):
     def test_cli_connections_add_duplicate(self):
         conn_id = "to_be_duplicated"
         connection_command.connections_add(
-            self.parser.parse_args(["connections", "add", conn_id, "--conn-uri=%s" % TEST_URL])
+            self.parser.parse_args(["connections", "add", conn_id, f"--conn-uri={TEST_URL}"])
         )
         # Check for addition attempt
         with pytest.raises(SystemExit, match=rf"A connection with `conn_id`={conn_id} already exists"):
             connection_command.connections_add(
-                self.parser.parse_args(["connections", "add", conn_id, "--conn-uri=%s" % TEST_URL])
+                self.parser.parse_args(["connections", "add", conn_id, f"--conn-uri={TEST_URL}"])
             )
 
     def test_cli_connections_add_delete_with_missing_parameters(self):
@@ -671,7 +671,7 @@ class TestCliAddConnections(unittest.TestCase):
         # Attempt to add with invalid uri
         with pytest.raises(SystemExit, match=r"The URI provided to --conn-uri is invalid: nonsense_uri"):
             connection_command.connections_add(
-                self.parser.parse_args(["connections", "add", "new1", "--conn-uri=%s" % "nonsense_uri"])
+                self.parser.parse_args(["connections", "add", "new1", f"--conn-uri={'nonsense_uri'}"])
             )
 
 
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
index fae2c73..f6dd6ee 100644
--- a/tests/core/test_core.py
+++ b/tests/core/test_core.py
@@ -172,7 +172,7 @@ class TestCore(unittest.TestCase):
         op = BashOperator(
             task_id='test_bash_operator_kill',
             execution_timeout=timedelta(seconds=1),
-            bash_command="/bin/bash -c 'sleep %s'" % sleep_time,
+            bash_command=f"/bin/bash -c 'sleep {sleep_time}'",
             dag=self.dag,
         )
         with pytest.raises(AirflowTaskTimeout):
diff --git a/tests/dags/test_subdag.py b/tests/dags/test_subdag.py
index e45b2a1..f2227dc 100644
--- a/tests/dags/test_subdag.py
+++ b/tests/dags/test_subdag.py
@@ -48,7 +48,7 @@ def subdag(parent_dag_name, child_dag_name, args):
 
     for i in range(2):
         DummyOperator(
-            task_id='{}-task-{}'.format(child_dag_name, i + 1),
+            task_id=f'{child_dag_name}-task-{i + 1}',
             default_args=args,
             dag=dag_subdag,
         )
diff --git a/tests/dags_corrupted/test_impersonation_custom.py b/tests/dags_corrupted/test_impersonation_custom.py
index 77ea1ed..4d3b978 100644
--- a/tests/dags_corrupted/test_impersonation_custom.py
+++ b/tests/dags_corrupted/test_impersonation_custom.py
@@ -39,7 +39,7 @@ dag = DAG(dag_id='impersonation_with_custom_pkg', default_args=args)
 
 def print_today():
     date_time = FakeDatetime.utcnow()
-    print('Today is {}'.format(date_time.strftime('%Y-%m-%d')))
+    print(f"Today is {date_time.strftime('%Y-%m-%d')}")
 
 
 def check_hive_conf():
diff --git a/tests/executors/test_celery_executor.py b/tests/executors/test_celery_executor.py
index 944fa49..44edc47 100644
--- a/tests/executors/test_celery_executor.py
+++ b/tests/executors/test_celery_executor.py
@@ -391,7 +391,7 @@ class ClassWithCustomAttributes:
             setattr(self, key, value)
 
     def __str__(self):
-        return "{}({})".format(ClassWithCustomAttributes.__name__, str(self.__dict__))
+        return f"{ClassWithCustomAttributes.__name__}({str(self.__dict__)})"
 
     def __repr__(self):
         return self.__str__()
diff --git a/tests/hooks/test_dbapi.py b/tests/hooks/test_dbapi.py
index 2cc916d..0f6c55a 100644
--- a/tests/hooks/test_dbapi.py
+++ b/tests/hooks/test_dbapi.py
@@ -125,7 +125,7 @@ class TestDbApiHook(unittest.TestCase):
         commit_count = 2  # The first and last commit
         assert commit_count == self.conn.commit.call_count
 
-        sql = "INSERT INTO {} ({}) VALUES (%s)".format(table, target_fields[0])
+        sql = f"INSERT INTO {table} ({target_fields[0]}) VALUES (%s)"
         for row in rows:
             self.cur.execute.assert_any_call(sql, row)
 
diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py
index 95607a6..f942b89 100644
--- a/tests/models/test_baseoperator.py
+++ b/tests/models/test_baseoperator.py
@@ -42,7 +42,7 @@ class ClassWithCustomAttributes:
             setattr(self, key, value)
 
     def __str__(self):
-        return "{}({})".format(ClassWithCustomAttributes.__name__, str(self.__dict__))
+        return f"{ClassWithCustomAttributes.__name__}({str(self.__dict__)})"
 
     def __repr__(self):
         return self.__str__()
@@ -154,7 +154,7 @@ class TestBaseOperator(unittest.TestCase):
             ({"user_defined_macros": {"foo": "bar"}}, "{{ foo }}", {}, "bar"),
             ({"user_defined_macros": {"foo": "bar"}}, 1, {}, 1),
             (
-                {"user_defined_filters": {"hello": lambda name: "Hello %s" % name}},
+                {"user_defined_filters": {"hello": lambda name: f"Hello {name}"}},
                 "{{ 'world' | hello }}",
                 {},
                 "Hello world",
diff --git a/tests/models/test_connection.py b/tests/models/test_connection.py
index a96b89a..526d029 100644
--- a/tests/models/test_connection.py
+++ b/tests/models/test_connection.py
@@ -55,7 +55,7 @@ class UriTestCaseConfig:
 
     @staticmethod
     def uri_test_name(func, num, param):
-        return "{}_{}_{}".format(func.__name__, num, param.args[0].description.replace(' ', '_'))
+        return f"{func.__name__}_{num}_{param.args[0].description.replace(' ', '_')}"
 
 
 class TestConnection(unittest.TestCase):
diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py
index 60171d8..123c119 100644
--- a/tests/models/test_dag.py
+++ b/tests/models/test_dag.py
@@ -476,7 +476,7 @@ class TestDag(unittest.TestCase):
 
     def test_user_defined_filters(self):
         def jinja_udf(name):
-            return 'Hello %s' % name
+            return f'Hello {name}'
 
         dag = models.DAG('test-dag', start_date=DEFAULT_DATE, user_defined_filters={"hello": jinja_udf})
         jinja_env = dag.get_template_env()
@@ -1540,7 +1540,7 @@ class TestDag(unittest.TestCase):
             )
 
             for i in range(2):
-                DummyOperator(task_id='{}-task-{}'.format(child_dag_name, i + 1), dag=dag_subdag)
+                DummyOperator(task_id=f'{child_dag_name}-task-{i + 1}', dag=dag_subdag)
 
             return dag_subdag
 
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 6c6b1cb..ce11892 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -105,7 +105,7 @@ class TestDagBag(unittest.TestCase):
                 dagbag = models.DagBag(include_examples=False, safe_mode=True)
 
             assert len(dagbag.dagbag_stats) == 1
-            assert dagbag.dagbag_stats[0].file == "/{}".format(os.path.basename(f.name))
+            assert dagbag.dagbag_stats[0].file == f"/{os.path.basename(f.name)}"
 
     def test_safe_mode_heuristic_mismatch(self):
         """With safe mode enabled, a file not matching the discovery heuristics
@@ -122,7 +122,7 @@ class TestDagBag(unittest.TestCase):
             with conf_vars({('core', 'dags_folder'): self.empty_dir}):
                 dagbag = models.DagBag(include_examples=False, safe_mode=False)
             assert len(dagbag.dagbag_stats) == 1
-            assert dagbag.dagbag_stats[0].file == "/{}".format(os.path.basename(f.name))
+            assert dagbag.dagbag_stats[0].file == f"/{os.path.basename(f.name)}"
 
     def test_process_file_that_contains_multi_bytes_char(self):
         """
@@ -298,7 +298,7 @@ class TestDagBag(unittest.TestCase):
         actual_found_dag_ids = list(map(lambda dag: dag.dag_id, actual_found_dags))
 
         for dag_id in expected_dag_ids:
-            actual_dagbag.log.info('validating %s' % dag_id)
+            actual_dagbag.log.info(f'validating {dag_id}')
             assert (
                 dag_id in actual_found_dag_ids
             ) == should_be_found, 'dag "{}" should {}have been found after processing dag "{}"'.format(
diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py
index 1cf4e3f..f754753 100644
--- a/tests/models/test_renderedtifields.py
+++ b/tests/models/test_renderedtifields.py
@@ -50,7 +50,7 @@ class ClassWithCustomAttributes:
             setattr(self, key, value)
 
     def __str__(self):
-        return "{}({})".format(ClassWithCustomAttributes.__name__, str(self.__dict__))
+        return f"{ClassWithCustomAttributes.__name__}({str(self.__dict__)})"
 
     def __repr__(self):
         return self.__str__()
diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
index 51d42e0..b852c2e 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py
+++ b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
@@ -198,7 +198,7 @@ def batch_infrastructure(
     assert resp["jobDefinitionArn"]
     job_definition_arn = resp["jobDefinitionArn"]
     assert resp["revision"]
-    assert resp["jobDefinitionArn"].endswith("{}:{}".format(resp["jobDefinitionName"], resp["revision"]))
+    assert resp["jobDefinitionArn"].endswith(f"{resp['jobDefinitionName']}:{resp['revision']}")
 
     infrastructure.vpc_id = vpc_id
     infrastructure.subnet_id = subnet_id
diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/tests/providers/amazon/aws/hooks/test_s3.py
index d962068..fc00d9d 100644
--- a/tests/providers/amazon/aws/hooks/test_s3.py
+++ b/tests/providers/amazon/aws/hooks/test_s3.py
@@ -118,8 +118,8 @@ class TestAwsS3Hook:
         bucket = hook.get_bucket(s3_bucket)
 
         # we don't need to test the paginator that's covered by boto tests
-        keys = ["%s/b" % i for i in range(2)]
-        dirs = ["%s/" % i for i in range(2)]
+        keys = [f"{i}/b" for i in range(2)]
+        dirs = [f"{i}/" for i in range(2)]
         for key in keys:
             bucket.put_object(Key=key, Body=b'a')
 
diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/tests/providers/apache/hive/operators/test_hive_stats.py
index d38e007..02dbdd8 100644
--- a/tests/providers/apache/hive/operators/test_hive_stats.py
+++ b/tests/providers/apache/hive/operators/test_hive_stats.py
@@ -274,17 +274,13 @@ class TestHiveStatsCollectionOperator(TestHiveEnvironment):
         hive_stats_collection_operator = HiveStatsCollectionOperator(**self.kwargs)
         hive_stats_collection_operator.execute(context={})
 
-        sql = """
+        sql = f"""
             DELETE FROM hive_stats
             WHERE
-                table_name='{}' AND
-                partition_repr='{}' AND
-                dttm='{}';
-            """.format(
-            hive_stats_collection_operator.table,
-            mock_json_dumps.return_value,
-            hive_stats_collection_operator.dttm,
-        )
+                table_name='{hive_stats_collection_operator.table}' AND
+                partition_repr='{mock_json_dumps.return_value}' AND
+                dttm='{hive_stats_collection_operator.dttm}';
+            """
         mock_mysql_hook.return_value.run.assert_called_once_with(sql)
 
     @unittest.skipIf(
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
index c6f7736..0413b5c 100644
--- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
@@ -317,8 +317,8 @@ class TestTransfer(unittest.TestCase):
             with hook.get_conn() as conn:
                 conn.execute(f"DROP TABLE IF EXISTS {mysql_table}")
                 conn.execute(
-                    """
-                    CREATE TABLE {} (
+                    f"""
+                    CREATE TABLE {mysql_table} (
                         c0 TINYINT,
                         c1 SMALLINT,
                         c2 MEDIUMINT,
@@ -326,9 +326,7 @@ class TestTransfer(unittest.TestCase):
                         c4 BIGINT,
                         c5 TIMESTAMP
                     )
-                """.format(
-                        mysql_table
-                    )
+                """
                 )
 
             op = MySqlToHiveOperator(
@@ -368,14 +366,12 @@ class TestTransfer(unittest.TestCase):
             with hook.get_conn() as conn:
                 conn.execute(f"DROP TABLE IF EXISTS {mysql_table}")
                 conn.execute(
-                    """
-                    CREATE TABLE {} (
+                    f"""
+                    CREATE TABLE {mysql_table} (
                         c0 VARCHAR(25),
                         c1 VARCHAR(25)
                     )
-                """.format(
-                        mysql_table
-                    )
+                """
                 )
                 conn.execute(
                     """
@@ -475,8 +471,8 @@ class TestTransfer(unittest.TestCase):
             with hook.get_conn() as conn:
                 conn.execute(f"DROP TABLE IF EXISTS {mysql_table}")
                 conn.execute(
-                    """
-                    CREATE TABLE {} (
+                    f"""
+                    CREATE TABLE {mysql_table} (
                         c0 TINYINT   UNSIGNED,
                         c1 SMALLINT  UNSIGNED,
                         c2 MEDIUMINT UNSIGNED,
@@ -488,9 +484,7 @@ class TestTransfer(unittest.TestCase):
                         c8 INT,
                         c9 BIGINT
                     )
-                """.format(
-                        mysql_table
-                    )
+                """
                 )
                 conn.execute(
                     """
diff --git a/tests/providers/apache/spark/hooks/test_spark_sql.py b/tests/providers/apache/spark/hooks/test_spark_sql.py
index 85a5159..35e4330 100644
--- a/tests/providers/apache/spark/hooks/test_spark_sql.py
+++ b/tests/providers/apache/spark/hooks/test_spark_sql.py
@@ -60,11 +60,11 @@ class TestSparkSqlHook(unittest.TestCase):
         cmd = ' '.join(hook._prepare_command(""))
 
         # Check all the parameters
-        assert "--executor-cores {}".format(self._config['executor_cores']) in cmd
-        assert "--executor-memory {}".format(self._config['executor_memory']) in cmd
-        assert "--keytab {}".format(self._config['keytab']) in cmd
-        assert "--name {}".format(self._config['name']) in cmd
-        assert "--num-executors {}".format(self._config['num_executors']) in cmd
+        assert f"--executor-cores {self._config['executor_cores']}" in cmd
+        assert f"--executor-memory {self._config['executor_memory']}" in cmd
+        assert f"--keytab {self._config['keytab']}" in cmd
+        assert f"--name {self._config['name']}" in cmd
+        assert f"--num-executors {self._config['num_executors']}" in cmd
         sql_path = get_after('-f', hook._prepare_command(""))
         assert self._config['sql'].strip() == sql_path
 
diff --git a/tests/providers/apache/sqoop/hooks/test_sqoop.py b/tests/providers/apache/sqoop/hooks/test_sqoop.py
index 332021a..08926d4 100644
--- a/tests/providers/apache/sqoop/hooks/test_sqoop.py
+++ b/tests/providers/apache/sqoop/hooks/test_sqoop.py
@@ -179,29 +179,29 @@ class TestSqoopHook(unittest.TestCase):
 
         # Check if the config has been extracted from the json
         if self._config_json['namenode']:
-            assert "-fs {}".format(self._config_json['namenode']) in cmd
+            assert f"-fs {self._config_json['namenode']}" in cmd
 
         if self._config_json['job_tracker']:
-            assert "-jt {}".format(self._config_json['job_tracker']) in cmd
+            assert f"-jt {self._config_json['job_tracker']}" in cmd
 
         if self._config_json['libjars']:
-            assert "-libjars {}".format(self._config_json['libjars']) in cmd
+            assert f"-libjars {self._config_json['libjars']}" in cmd
 
         if self._config_json['files']:
-            assert "-files {}".format(self._config_json['files']) in cmd
+            assert f"-files {self._config_json['files']}" in cmd
 
         if self._config_json['archives']:
-            assert "-archives {}".format(self._config_json['archives']) in cmd
+            assert f"-archives {self._config_json['archives']}" in cmd
 
-        assert "--hcatalog-database {}".format(self._config['hcatalog_database']) in cmd
-        assert "--hcatalog-table {}".format(self._config['hcatalog_table']) in cmd
+        assert f"--hcatalog-database {self._config['hcatalog_database']}" in cmd
+        assert f"--hcatalog-table {self._config['hcatalog_table']}" in cmd
 
         # Check the regulator stuff passed by the default constructor
         if self._config['verbose']:
             assert "--verbose" in cmd
 
         if self._config['num_mappers']:
-            assert "--num-mappers {}".format(self._config['num_mappers']) in cmd
+            assert f"--num-mappers {self._config['num_mappers']}" in cmd
 
         for key, value in self._config['properties'].items():
             assert f"-D {key}={value}" in cmd
@@ -243,21 +243,14 @@ class TestSqoopHook(unittest.TestCase):
             )
         )
 
-        assert "--input-null-string {}".format(self._config_export['input_null_string']) in cmd
-        assert "--input-null-non-string {}".format(self._config_export['input_null_non_string']) in cmd
-        assert "--staging-table {}".format(self._config_export['staging_table']) in cmd
-        assert "--enclosed-by {}".format(self._config_export['enclosed_by']) in cmd
-        assert "--escaped-by {}".format(self._config_export['escaped_by']) in cmd
-        assert (
-            "--input-fields-terminated-by {}".format(self._config_export['input_fields_terminated_by']) in cmd
-        )
-        assert (
-            "--input-lines-terminated-by {}".format(self._config_export['input_lines_terminated_by']) in cmd
-        )
-        assert (
-            "--input-optionally-enclosed-by {}".format(self._config_export['input_optionally_enclosed_by'])
-            in cmd
-        )
+        assert f"--input-null-string {self._config_export['input_null_string']}" in cmd
+        assert f"--input-null-non-string {self._config_export['input_null_non_string']}" in cmd
+        assert f"--staging-table {self._config_export['staging_table']}" in cmd
+        assert f"--enclosed-by {self._config_export['enclosed_by']}" in cmd
+        assert f"--escaped-by {self._config_export['escaped_by']}" in cmd
+        assert f"--input-fields-terminated-by {self._config_export['input_fields_terminated_by']}" in cmd
+        assert f"--input-lines-terminated-by {self._config_export['input_lines_terminated_by']}" in cmd
+        assert f"--input-optionally-enclosed-by {self._config_export['input_optionally_enclosed_by']}" in cmd
         # these options are from the extra export options
         assert "--update-key id" in cmd
         assert "--update-mode allowinsert" in cmd
@@ -301,10 +294,10 @@ class TestSqoopHook(unittest.TestCase):
         if self._config_import['direct']:
             assert '--direct' in cmd
 
-        assert '--target-dir {}'.format(self._config_import['target_dir']) in cmd
+        assert f"--target-dir {self._config_import['target_dir']}" in cmd
 
-        assert '--driver {}'.format(self._config_import['driver']) in cmd
-        assert '--split-by {}'.format(self._config_import['split_by']) in cmd
+        assert f"--driver {self._config_import['driver']}" in cmd
+        assert f"--split-by {self._config_import['split_by']}" in cmd
         # these are from extra options, but not passed to this cmd import command
         assert '--show' not in cmd
         assert 'hcatalog-storage-stanza \"stored as orcfile\"' not in cmd
diff --git a/tests/providers/elasticsearch/log/elasticmock/__init__.py b/tests/providers/elasticsearch/log/elasticmock/__init__.py
index 2490dbe..c2a9080 100644
--- a/tests/providers/elasticsearch/log/elasticmock/__init__.py
+++ b/tests/providers/elasticsearch/log/elasticmock/__init__.py
@@ -51,7 +51,7 @@ ELASTIC_INSTANCES = {}  # type: Dict[str, FakeElasticsearch]
 
 def _get_elasticmock(hosts=None, *args, **kwargs):  # pylint: disable=unused-argument
     host = _normalize_hosts(hosts)[0]
-    elastic_key = '{}:{}'.format(host.get('host', 'localhost'), host.get('port', 9200))
+    elastic_key = f"{host.get('host', 'localhost')}:{host.get('port', 9200)}"
 
     if elastic_key in ELASTIC_INSTANCES:
         connection = ELASTIC_INSTANCES.get(elastic_key)
diff --git a/tests/providers/google/cloud/hooks/test_cloud_sql.py b/tests/providers/google/cloud/hooks/test_cloud_sql.py
index 003245d..9e9ec70 100644
--- a/tests/providers/google/cloud/hooks/test_cloud_sql.py
+++ b/tests/providers/google/cloud/hooks/test_cloud_sql.py
@@ -1061,9 +1061,7 @@ class TestCloudSqlDatabaseQueryHook(unittest.TestCase):
         project = self.sql_connection.extra_dejson['project_id']
         location = self.sql_connection.extra_dejson['location']
         instance = self.sql_connection.extra_dejson['instance']
-        instance_spec = "{project}:{location}:{instance}".format(
-            project=project, location=location, instance=instance
-        )
+        instance_spec = f"{project}:{location}:{instance}"
         assert sqlproxy_runner.instance_specification == instance_spec
 
     @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection")
diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py
index 0841806..4086526 100644
--- a/tests/providers/google/cloud/hooks/test_pubsub.py
+++ b/tests/providers/google/cloud/hooks/test_pubsub.py
@@ -130,17 +130,17 @@ class TestPubSubHook(unittest.TestCase):
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_delete_nonexisting_topic_failifnotexists(self, mock_service):
         mock_service.return_value.delete_topic.side_effect = NotFound(
-            'Topic does not exists: %s' % EXPANDED_TOPIC
+            f'Topic does not exists: {EXPANDED_TOPIC}'
         )
         with pytest.raises(PubSubException) as ctx:
             self.pubsub_hook.delete_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_not_exists=True)
 
-        assert str(ctx.value) == 'Topic does not exist: %s' % EXPANDED_TOPIC
+        assert str(ctx.value) == f'Topic does not exist: {EXPANDED_TOPIC}'
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_delete_topic_api_call_error(self, mock_service):
         mock_service.return_value.delete_topic.side_effect = GoogleAPICallError(
-            'Error deleting topic: %s' % EXPANDED_TOPIC
+            f'Error deleting topic: {EXPANDED_TOPIC}'
         )
         with pytest.raises(PubSubException):
             self.pubsub_hook.delete_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_not_exists=True)
@@ -148,23 +148,23 @@ class TestPubSubHook(unittest.TestCase):
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_create_preexisting_topic_failifexists(self, mock_service):
         mock_service.return_value.create_topic.side_effect = AlreadyExists(
-            'Topic already exists: %s' % TEST_TOPIC
+            f'Topic already exists: {TEST_TOPIC}'
         )
         with pytest.raises(PubSubException) as ctx:
             self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_exists=True)
-        assert str(ctx.value) == 'Topic already exists: %s' % TEST_TOPIC
+        assert str(ctx.value) == f'Topic already exists: {TEST_TOPIC}'
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_create_preexisting_topic_nofailifexists(self, mock_service):
         mock_service.return_value.create_topic.side_effect = AlreadyExists(
-            'Topic already exists: %s' % EXPANDED_TOPIC
+            f'Topic already exists: {EXPANDED_TOPIC}'
         )
         self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC)
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn'))
     def test_create_topic_api_call_error(self, mock_service):
         mock_service.return_value.create_topic.side_effect = GoogleAPICallError(
-            'Error creating topic: %s' % TEST_TOPIC
+            f'Error creating topic: {TEST_TOPIC}'
         )
         with pytest.raises(PubSubException):
             self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_exists=True)
@@ -238,18 +238,18 @@ class TestPubSubHook(unittest.TestCase):
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_delete_nonexisting_subscription_failifnotexists(self, mock_service):
         mock_service.delete_subscription.side_effect = NotFound(
-            'Subscription does not exists: %s' % EXPANDED_SUBSCRIPTION
+            f'Subscription does not exists: {EXPANDED_SUBSCRIPTION}'
         )
         with pytest.raises(PubSubException) as ctx:
             self.pubsub_hook.delete_subscription(
                 project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, fail_if_not_exists=True
             )
-        assert str(ctx.value) == 'Subscription does not exist: %s' % EXPANDED_SUBSCRIPTION
+        assert str(ctx.value) == f'Subscription does not exist: {EXPANDED_SUBSCRIPTION}'
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_delete_subscription_api_call_error(self, mock_service):
         mock_service.delete_subscription.side_effect = GoogleAPICallError(
-            'Error deleting subscription %s' % EXPANDED_SUBSCRIPTION
+            f'Error deleting subscription {EXPANDED_SUBSCRIPTION}'
         )
         with pytest.raises(PubSubException):
             self.pubsub_hook.delete_subscription(
@@ -262,7 +262,7 @@ class TestPubSubHook(unittest.TestCase):
         self, mock_uuid, mock_service
     ):  # noqa  # pylint: disable=unused-argument,line-too-long
         create_method = mock_service.create_subscription
-        expected_name = EXPANDED_SUBSCRIPTION.replace(TEST_SUBSCRIPTION, 'sub-%s' % TEST_UUID)
+        expected_name = EXPANDED_SUBSCRIPTION.replace(TEST_SUBSCRIPTION, f'sub-{TEST_UUID}')
 
         response = self.pubsub_hook.create_subscription(project_id=TEST_PROJECT, topic=TEST_TOPIC)
         create_method.assert_called_once_with(
@@ -282,7 +282,7 @@ class TestPubSubHook(unittest.TestCase):
             timeout=None,
             metadata=None,
         )
-        assert 'sub-%s' % TEST_UUID == response
+        assert f'sub-{TEST_UUID}' == response
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_create_subscription_with_ack_deadline(self, mock_service):
@@ -342,18 +342,18 @@ class TestPubSubHook(unittest.TestCase):
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_create_subscription_failifexists(self, mock_service):
         mock_service.create_subscription.side_effect = AlreadyExists(
-            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION
+            f'Subscription already exists: {EXPANDED_SUBSCRIPTION}'
         )
         with pytest.raises(PubSubException) as ctx:
             self.pubsub_hook.create_subscription(
                 project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION, fail_if_exists=True
             )
-        assert str(ctx.value) == 'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION
+        assert str(ctx.value) == f'Subscription already exists: {EXPANDED_SUBSCRIPTION}'
 
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_create_subscription_api_call_error(self, mock_service):
         mock_service.create_subscription.side_effect = GoogleAPICallError(
-            'Error creating subscription %s' % EXPANDED_SUBSCRIPTION
+            f'Error creating subscription {EXPANDED_SUBSCRIPTION}'
         )
         with pytest.raises(PubSubException):
             self.pubsub_hook.create_subscription(
@@ -363,7 +363,7 @@ class TestPubSubHook(unittest.TestCase):
     @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client'))
     def test_create_subscription_nofailifexists(self, mock_service):
         mock_service.create_subscription.side_effect = AlreadyExists(
-            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION
+            f'Subscription already exists: {EXPANDED_SUBSCRIPTION}'
         )
         response = self.pubsub_hook.create_subscription(
             project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION
diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/tests/providers/google/cloud/operators/test_dataflow.py
index 7e290d7..c682a31 100644
--- a/tests/providers/google/cloud/operators/test_dataflow.py
+++ b/tests/providers/google/cloud/operators/test_dataflow.py
@@ -55,7 +55,7 @@ DEFAULT_OPTIONS_TEMPLATE = {
     'zone': 'us-central1-f',
 }
 ADDITIONAL_OPTIONS = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}}
-TEST_VERSION = 'v{}'.format(version.replace('.', '-').replace('+', '-'))
+TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
 EXPECTED_ADDITIONAL_OPTIONS = {
     'output': 'gs://test/output',
     'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
diff --git a/tests/providers/google/cloud/operators/test_mlengine_utils.py b/tests/providers/google/cloud/operators/test_mlengine_utils.py
index 539ee60..65b41b6 100644
--- a/tests/providers/google/cloud/operators/test_mlengine_utils.py
+++ b/tests/providers/google/cloud/operators/test_mlengine_utils.py
@@ -28,7 +28,7 @@ from airflow.providers.google.cloud.utils import mlengine_operator_utils
 from airflow.version import version
 
 DEFAULT_DATE = datetime.datetime(2017, 6, 6)
-TEST_VERSION = 'v{}'.format(version.replace('.', '-').replace('+', '-'))
+TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
 
 
 class TestCreateEvaluateOps(unittest.TestCase):
@@ -80,7 +80,7 @@ class TestCreateEvaluateOps(unittest.TestCase):
             input_paths=input_with_model['inputPaths'],
             prediction_path=input_with_model['outputPath'],
             metric_fn_and_keys=(self.metric_fn, ['err']),
-            validate_fn=(lambda x: 'err=%.1f' % x['err']),
+            validate_fn=(lambda x: f"err={x['err']:.1f}"),
             dag=self.dag,
             py_interpreter="python3",
         )
@@ -168,7 +168,7 @@ class TestCreateEvaluateOps(unittest.TestCase):
             'input_paths': input_with_model['inputPaths'],
             'prediction_path': input_with_model['outputPath'],
             'metric_fn_and_keys': (self.metric_fn, ['err']),
-            'validate_fn': (lambda x: 'err=%.1f' % x['err']),
+            'validate_fn': (lambda x: f"err={x['err']:.1f}"),
         }
 
         with pytest.raises(AirflowException, match='Missing model origin'):
diff --git a/tests/providers/google/cloud/sensors/test_gcs.py b/tests/providers/google/cloud/sensors/test_gcs.py
index e3c8917..77e0c35 100644
--- a/tests/providers/google/cloud/sensors/test_gcs.py
+++ b/tests/providers/google/cloud/sensors/test_gcs.py
@@ -175,7 +175,7 @@ class TestGoogleCloudStoragePrefixSensor(TestCase):
             impersonation_chain=TEST_IMPERSONATION_CHAIN,
             poke_interval=0,
         )
-        generated_messages = ['test-prefix/obj%s' % i for i in range(5)]
+        generated_messages = [f'test-prefix/obj{i}' for i in range(5)]
         mock_hook.return_value.list.return_value = generated_messages
 
         response = task.execute(None)
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
index 1621829..b3c0acd 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
@@ -30,7 +30,7 @@ class TestBigQueryToBigQueryOperator(unittest.TestCase):
     @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryHook')
     def test_execute(self, mock_hook):
         source_project_dataset_tables = f'{TEST_DATASET}.{TEST_TABLE_ID}'
-        destination_project_dataset_table = '{}.{}'.format(TEST_DATASET + '_new', TEST_TABLE_ID)
+        destination_project_dataset_table = f"{TEST_DATASET + '_new'}.{TEST_TABLE_ID}"
         write_disposition = 'WRITE_EMPTY'
         create_disposition = 'CREATE_IF_NEEDED'
         labels = {'k1': 'v1'}
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
index 5c0c38c..2df9fea 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
@@ -143,7 +143,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             source_bucket=TEST_BUCKET,
             source_object=SOURCE_OBJECT_WILDCARD_FILENAME,
             destination_bucket=DESTINATION_BUCKET,
-            destination_object='{}/{}'.format(DESTINATION_OBJECT_PREFIX, SOURCE_OBJECT_WILDCARD_SUFFIX[:-1]),
+            destination_object=f'{DESTINATION_OBJECT_PREFIX}/{SOURCE_OBJECT_WILDCARD_SUFFIX[:-1]}',
         )
 
         operator.execute(None)
diff --git a/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py b/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py
index 289eb43..5e19c17 100644
--- a/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py
+++ b/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py
@@ -83,11 +83,11 @@ METRIC_KEYS_EXPECTED = ','.join(METRIC_KEYS)
 
 def validate_err_and_count(summary):
     if summary['err'] > 0.2:
-        raise ValueError('Too high err>0.2; summary=%s' % summary)
+        raise ValueError(f'Too high err>0.2; summary={summary}')
     if summary['mse'] > 0.05:
-        raise ValueError('Too high mse>0.05; summary=%s' % summary)
+        raise ValueError(f'Too high mse>0.05; summary={summary}')
     if summary['count'] < 1000:
-        raise ValueError('Too few instances<1000; summary=%s' % summary)
+        raise ValueError(f'Too few instances<1000; summary={summary}')
     return summary
 
 
diff --git a/tests/providers/mysql/hooks/test_mysql.py b/tests/providers/mysql/hooks/test_mysql.py
index 538381f..19f0bd8 100644
--- a/tests/providers/mysql/hooks/test_mysql.py
+++ b/tests/providers/mysql/hooks/test_mysql.py
@@ -431,10 +431,8 @@ class TestMySql(unittest.TestCase):
             from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
             assert mock_execute.call_count == 1
-            query = """
+            query = f"""
                 SELECT * INTO OUTFILE '{tmp_file}'
                 FROM {table}
-            """.format(
-                tmp_file=tmp_file, table=table
-            )
+            """
             assert_equal_ignore_multiple_spaces(self, mock_execute.call_args[0][0], query)
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index 2046e22..9c75513 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -197,7 +197,7 @@ def make_user_defined_macro_filter_dag():
         user_defined_macros={
             'next_execution_date': compute_next_execution_date,
         },
-        user_defined_filters={'hello': lambda name: 'Hello %s' % name},
+        user_defined_filters={'hello': lambda name: f'Hello {name}'},
         catchup=False,
     )
     BashOperator(
@@ -731,7 +731,7 @@ class TestStringifiedDAGs(unittest.TestCase):
                 setattr(self, key, value)
 
         def __str__(self):
-            return "{}({})".format(self.__class__.__name__, str(self.__dict__))
+            return f"{self.__class__.__name__}({str(self.__dict__)})"
 
         def __repr__(self):
             return self.__str__()
diff --git a/tests/test_utils/gcp_system_helpers.py b/tests/test_utils/gcp_system_helpers.py
index 6572111..314f09a 100644
--- a/tests/test_utils/gcp_system_helpers.py
+++ b/tests/test_utils/gcp_system_helpers.py
@@ -184,7 +184,7 @@ class GoogleSystemTest(SystemTest):
                 "gsutil",
                 "iam",
                 "ch",
-                "serviceAccount:%s:admin" % account_email,
+                f"serviceAccount:{account_email}:admin",
                 bucket_name,
             ]
         )
diff --git a/tests/test_utils/logging_command_executor.py b/tests/test_utils/logging_command_executor.py
index 1ebf729..5fca244 100644
--- a/tests/test_utils/logging_command_executor.py
+++ b/tests/test_utils/logging_command_executor.py
@@ -57,7 +57,7 @@ class LoggingCommandExecutor(LoggingMixin):
             self.log.info("Stdout: %s", output)
             self.log.info("Stderr: %s", err)
             raise AirflowException(
-                "Retcode {} on {} with stdout: {}, stderr: {}".format(retcode, " ".join(cmd), output, err)
+                f"Retcode {retcode} on {' '.join(cmd)} with stdout: {output}, stderr: {err}"
             )
         return output
 
diff --git a/tests/test_utils/mock_operators.py b/tests/test_utils/mock_operators.py
index 534770e..989b984 100644
--- a/tests/test_utils/mock_operators.py
+++ b/tests/test_utils/mock_operators.py
@@ -82,7 +82,7 @@ class CustomBaseIndexOpLink(BaseOperatorLink):
 
     @property
     def name(self) -> str:
-        return 'BigQuery Console #{index}'.format(index=self.index + 1)
+        return f'BigQuery Console #{self.index + 1}'
 
     def get_link(self, operator, dttm):
         ti = TaskInstance(task=operator, execution_date=dttm)
diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py
index fffa2d4..0477c01 100644
--- a/tests/utils/test_helpers.py
+++ b/tests/utils/test_helpers.py
@@ -43,9 +43,7 @@ class TestHelpers(unittest.TestCase):
         filename_template = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log"
 
         ts = ti.get_template_context()['ts']
-        expected_filename = "{dag_id}/{task_id}/{ts}/{try_number}.log".format(
-            dag_id=dag_id, task_id=task_id, ts=ts, try_number=try_number
-        )
+        expected_filename = f"{dag_id}/{task_id}/{ts}/{try_number}.log"
 
         rendered_filename = helpers.render_log_filename(ti, try_number, filename_template)
 
diff --git a/tests/www/api/experimental/test_endpoints.py b/tests/www/api/experimental/test_endpoints.py
index 5981eac..1a379d9 100644
--- a/tests/www/api/experimental/test_endpoints.py
+++ b/tests/www/api/experimental/test_endpoints.py
@@ -368,7 +368,7 @@ class TestPoolApiExperimental(TestBase):
         clear_db_pools()
         self.pools = [Pool.get_default_pool()]
         for i in range(self.USER_POOL_COUNT):
-            name = 'experimental_%s' % (i + 1)
+            name = f'experimental_{i + 1}'
             pool = Pool(
                 pool=name,
                 slots=i,
diff --git a/tests/www/test_views.py b/tests/www/test_views.py
index 5011547..929076b 100644
--- a/tests/www/test_views.py
+++ b/tests/www/test_views.py
@@ -757,7 +757,7 @@ class TestAirflowBaseViews(TestBase):
         url = 'dag_details?dag_id=test_tree_view'
         resp = self.client.get(url, follow_redirects=True)
         params = {'dag_id': 'test_tree_view', 'origin': '/tree?dag_id=test_tree_view'}
-        href = "/trigger?{}".format(html.escape(urllib.parse.urlencode(params)))
+        href = f"/trigger?{html.escape(urllib.parse.urlencode(params))}"
         self.check_content_in_response(href, resp)
 
     def test_dag_details_trigger_origin_graph_view(self):
@@ -772,7 +772,7 @@ class TestAirflowBaseViews(TestBase):
         url = 'dag_details?dag_id=test_graph_view'
         resp = self.client.get(url, follow_redirects=True)
         params = {'dag_id': 'test_graph_view', 'origin': '/graph?dag_id=test_graph_view'}
-        href = "/trigger?{}".format(html.escape(urllib.parse.urlencode(params)))
+        href = f"/trigger?{html.escape(urllib.parse.urlencode(params))}"
         self.check_content_in_response(href, resp)
 
     def test_dag_details_subdag(self):
@@ -1177,9 +1177,7 @@ class TestLogView(TestBase):
     DAG_ID_REMOVED = 'removed_dag_for_testing_log_view'
     TASK_ID = 'task_for_testing_log_view'
     DEFAULT_DATE = timezone.datetime(2017, 9, 1)
-    ENDPOINT = 'log?dag_id={dag_id}&task_id={task_id}&execution_date={execution_date}'.format(
-        dag_id=DAG_ID, task_id=TASK_ID, execution_date=DEFAULT_DATE
-    )
+    ENDPOINT = f'log?dag_id={DAG_ID}&task_id={TASK_ID}&execution_date={DEFAULT_DATE}'
 
     def setUp(self):
         # Make sure that the configure_logging is not cached
@@ -1277,7 +1275,7 @@ class TestLogView(TestBase):
         for num in range(1, expected_num_logs_visible + 1):
             assert f'log-group-{num}' in response.data.decode('utf-8')
         assert 'log-group-0' not in response.data.decode('utf-8')
-        assert 'log-group-{}'.format(expected_num_logs_visible + 1) not in response.data.decode('utf-8')
+        assert f'log-group-{expected_num_logs_visible + 1}' not in response.data.decode('utf-8')
 
     def test_get_logs_with_metadata_as_download_file(self):
         url_template = (
@@ -1540,7 +1538,7 @@ class ViewWithDateTimeAndNumRunsAndDagRunsFormTester:
         Should set base date to execution date.
         """
         response = self.test.client.get(
-            self.endpoint + '&execution_date={}'.format(self.runs[1].execution_date.isoformat()),
+            self.endpoint + f'&execution_date={self.runs[1].execution_date.isoformat()}',
             data=dict(username='test', password='test'),
             follow_redirects=True,
         )
@@ -1563,7 +1561,7 @@ class ViewWithDateTimeAndNumRunsAndDagRunsFormTester:
         Should set base date and num runs to submitted values.
         """
         response = self.test.client.get(
-            self.endpoint + '&base_date={}&num_runs=2'.format(self.runs[1].execution_date.isoformat()),
+            self.endpoint + f'&base_date={self.runs[1].execution_date.isoformat()}&num_runs=2',
             data=dict(username='test', password='test'),
             follow_redirects=True,
         )
@@ -2849,7 +2847,7 @@ class TestExtraLinks(TestBase):
             name = 'foo-bar'
 
             def get_link(self, operator, dttm):
-                return 'http://www.example.com/{}/{}/{}'.format(operator.task_id, 'foo-bar', dttm)
+                return f"http://www.example.com/{operator.task_id}/foo-bar/{dttm}"
 
         class AirflowLink(BaseOperatorLink):
             name = 'airflow'

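The hunks above all apply the same mechanical rewrite: percent-formatting and str.format() calls become f-strings. A minimal illustrative sketch of the pattern, using placeholder values rather than code taken from the repository:

    topic = "my-topic"

    # Before: explicit formatting calls.
    old_percent = 'Topic already exists: %s' % topic
    old_format = 'Topic already exists: {}'.format(topic)

    # After: the expression is interpolated directly inside the literal.
    new_fstring = f'Topic already exists: {topic}'

    assert old_percent == old_format == new_fstring

The resulting strings are identical; only the formatting syntax changes.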

[airflow] 28/41: Support google-cloud-tasks>=2.0.0 (#13347)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c518035d1de9bce911e3d682b47f47c4ba45b1ec
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Jan 14 12:18:49 2021 +0100

    Support google-cloud-tasks>=2.0.0 (#13347)
    
    (cherry picked from commit ef8617ec9d6e4b7c433a29bd388f5102a7a17c11)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |   4 +-
 airflow/providers/google/cloud/hooks/tasks.py      | 118 ++++++++---------
 airflow/providers/google/cloud/operators/tasks.py  |  41 +++---
 setup.py                                           |   2 +-
 tests/providers/google/cloud/hooks/test_tasks.py   |  86 ++++++-------
 .../providers/google/cloud/operators/test_tasks.py | 140 ++++++++++++++++-----
 6 files changed, 235 insertions(+), 156 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index 800703b..c696e1b 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -32,10 +32,10 @@ Details are covered in the UPDATING.md files for each library, but there are som
 | [``google-cloud-automl``](https://pypi.org/project/google-cloud-automl/) | ``>=0.4.0,<2.0.0`` | ``>=2.1.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-automl/blob/master/UPGRADING.md) |
 | [``google-cloud-bigquery-datatransfer``](https://pypi.org/project/google-cloud-bigquery-datatransfer/) | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md) |
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
+| [``google-cloud-kms``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
-| [``google-cloud-kms``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-kms/blob/master/UPGRADING.md) |
-
+| [``google-cloud-tasks``](https://pypi.org/project/google-cloud-tasks/) | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-tasks/blob/master/UPGRADING.md) |
 
 ### The field names use the snake_case convention
 
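As the heading above notes, the upgraded clients expose message fields under their proto (snake_case) names. A small illustrative sketch, assuming google-cloud-tasks>=2.0.0 is installed and using a placeholder queue name:

    from google.cloud import tasks_v2

    queue = tasks_v2.Queue(name="projects/my-project/locations/us-central1/queues/my-queue")

    # Fields, including nested messages, are accessed with snake_case names.
    print(queue.name)
    print(queue.rate_limits.max_dispatches_per_second)
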
diff --git a/airflow/providers/google/cloud/hooks/tasks.py b/airflow/providers/google/cloud/hooks/tasks.py
index 1c3223d..633f227 100644
--- a/airflow/providers/google/cloud/hooks/tasks.py
+++ b/airflow/providers/google/cloud/hooks/tasks.py
@@ -21,11 +21,13 @@ This module contains a CloudTasksHook
 which allows you to connect to Google Cloud Tasks service,
 performing actions to queues or tasks.
 """
+
 from typing import Dict, List, Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry
-from google.cloud.tasks_v2 import CloudTasksClient, enums
-from google.cloud.tasks_v2.types import FieldMask, Queue, Task
+from google.cloud.tasks_v2 import CloudTasksClient
+from google.cloud.tasks_v2.types import Queue, Task
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
@@ -120,20 +122,19 @@ class CloudTasksHook(GoogleBaseHook):
         client = self.get_conn()
 
         if queue_name:
-            full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
+            full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
             if isinstance(task_queue, Queue):
                 task_queue.name = full_queue_name
             elif isinstance(task_queue, dict):
                 task_queue['name'] = full_queue_name
             else:
                 raise AirflowException('Unable to set queue_name.')
-        full_location_path = CloudTasksClient.location_path(project_id, location)
+        full_location_path = f"projects/{project_id}/locations/{location}"
         return client.create_queue(
-            parent=full_location_path,
-            queue=task_queue,
+            request={'parent': full_location_path, 'queue': task_queue},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -167,7 +168,7 @@ class CloudTasksHook(GoogleBaseHook):
         :param update_mask: A mask used to specify which fields of the queue are being updated.
             If empty, then all fields will be updated.
             If a dict is provided, it must be of the same form as the protobuf message.
-        :type update_mask: dict or google.cloud.tasks_v2.types.FieldMask
+        :type update_mask: dict or google.protobuf.field_mask_pb2.FieldMask
         :param retry: (Optional) A retry object used to retry requests.
             If None is specified, requests will not be retried.
         :type retry: google.api_core.retry.Retry
@@ -182,7 +183,7 @@ class CloudTasksHook(GoogleBaseHook):
         client = self.get_conn()
 
         if queue_name and location:
-            full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
+            full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
             if isinstance(task_queue, Queue):
                 task_queue.name = full_queue_name
             elif isinstance(task_queue, dict):
@@ -190,11 +191,10 @@ class CloudTasksHook(GoogleBaseHook):
             else:
                 raise AirflowException('Unable to set queue_name.')
         return client.update_queue(
-            queue=task_queue,
-            update_mask=update_mask,
+            request={'queue': task_queue, 'update_mask': update_mask},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -230,8 +230,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
-        return client.get_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
+        return client.get_queue(
+            request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def list_queues(
@@ -270,14 +272,12 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_location_path = CloudTasksClient.location_path(project_id, location)
+        full_location_path = f"projects/{project_id}/locations/{location}"
         queues = client.list_queues(
-            parent=full_location_path,
-            filter_=results_filter,
-            page_size=page_size,
+            request={'parent': full_location_path, 'filter': results_filter, 'page_size': page_size},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return list(queues)
 
@@ -313,8 +313,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
-        client.delete_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
+        client.delete_queue(
+            request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def purge_queue(
@@ -349,8 +351,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
-        return client.purge_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
+        return client.purge_queue(
+            request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def pause_queue(
@@ -385,8 +389,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
-        return client.pause_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
+        return client.pause_queue(
+            request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def resume_queue(
@@ -421,8 +427,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
-        return client.resume_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
+        return client.resume_queue(
+            request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def create_task(
@@ -432,7 +440,7 @@ class CloudTasksHook(GoogleBaseHook):
         task: Union[Dict, Task],
         project_id: str,
         task_name: Optional[str] = None,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
@@ -455,7 +463,7 @@ class CloudTasksHook(GoogleBaseHook):
         :type task_name: str
         :param response_view: (Optional) This field specifies which subset of the Task will
             be returned.
-        :type response_view: google.cloud.tasks_v2.enums.Task.View
+        :type response_view: google.cloud.tasks_v2.Task.View
         :param retry: (Optional) A retry object used to retry requests.
             If None is specified, requests will not be retried.
         :type retry: google.api_core.retry.Retry
@@ -470,21 +478,21 @@ class CloudTasksHook(GoogleBaseHook):
         client = self.get_conn()
 
         if task_name:
-            full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name)
+            full_task_name = (
+                f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
+            )
             if isinstance(task, Task):
                 task.name = full_task_name
             elif isinstance(task, dict):
                 task['name'] = full_task_name
             else:
                 raise AirflowException('Unable to set task_name.')
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
         return client.create_task(
-            parent=full_queue_name,
-            task=task,
-            response_view=response_view,
+            request={'parent': full_queue_name, 'task': task, 'response_view': response_view},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -494,7 +502,7 @@ class CloudTasksHook(GoogleBaseHook):
         queue_name: str,
         task_name: str,
         project_id: str,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
@@ -513,7 +521,7 @@ class CloudTasksHook(GoogleBaseHook):
         :type project_id: str
         :param response_view: (Optional) This field specifies which subset of the Task will
             be returned.
-        :type response_view: google.cloud.tasks_v2.enums.Task.View
+        :type response_view: google.cloud.tasks_v2.Task.View
         :param retry: (Optional) A retry object used to retry requests.
             If None is specified, requests will not be retried.
         :type retry: google.api_core.retry.Retry
@@ -527,13 +535,12 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name)
+        full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
         return client.get_task(
-            name=full_task_name,
-            response_view=response_view,
+            request={'name': full_task_name, 'response_view': response_view},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -542,7 +549,7 @@ class CloudTasksHook(GoogleBaseHook):
         location: str,
         queue_name: str,
         project_id: str,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         page_size: Optional[int] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
@@ -560,7 +567,7 @@ class CloudTasksHook(GoogleBaseHook):
         :type project_id: str
         :param response_view: (Optional) This field specifies which subset of the Task will
             be returned.
-        :type response_view: google.cloud.tasks_v2.enums.Task.View
+        :type response_view: google.cloud.tasks_v2.Task.View
         :param page_size: (Optional) The maximum number of resources contained in the
             underlying API response.
         :type page_size: int
@@ -576,14 +583,12 @@ class CloudTasksHook(GoogleBaseHook):
         :rtype: list[google.cloud.tasks_v2.types.Task]
         """
         client = self.get_conn()
-        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
+        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
         tasks = client.list_tasks(
-            parent=full_queue_name,
-            response_view=response_view,
-            page_size=page_size,
+            request={'parent': full_queue_name, 'response_view': response_view, 'page_size': page_size},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
         return list(tasks)
 
@@ -622,8 +627,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name)
-        client.delete_task(name=full_task_name, retry=retry, timeout=timeout, metadata=metadata)
+        full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
+        client.delete_task(
+            request={'name': full_task_name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def run_task(
@@ -632,7 +639,7 @@ class CloudTasksHook(GoogleBaseHook):
         queue_name: str,
         task_name: str,
         project_id: str,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
@@ -651,7 +658,7 @@ class CloudTasksHook(GoogleBaseHook):
         :type project_id: str
         :param response_view: (Optional) This field specifies which subset of the Task will
             be returned.
-        :type response_view: google.cloud.tasks_v2.enums.Task.View
+        :type response_view: google.cloud.tasks_v2.Task.View
         :param retry: (Optional) A retry object used to retry requests.
             If None is specified, requests will not be retried.
         :type retry: google.api_core.retry.Retry
@@ -665,11 +672,10 @@ class CloudTasksHook(GoogleBaseHook):
         """
         client = self.get_conn()
 
-        full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name)
+        full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
         return client.run_task(
-            name=full_task_name,
-            response_view=response_view,
+            request={'name': full_task_name, 'response_view': response_view},
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
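
The hook changes above follow the google-cloud-tasks 2.x calling convention: resource paths are built as plain strings, the arguments are wrapped in a single request mapping, and the hook now passes metadata or () instead of None. A minimal sketch of the two styles with placeholder identifiers, assuming google-cloud-tasks>=2.0.0; the network call itself is left commented out because it needs credentials:

    from google.cloud import tasks_v2

    project_id, location, queue_name = "my-project", "us-central1", "my-queue"

    # 1.x style (removed above): path helpers plus keyword arguments, e.g.
    #   parent = CloudTasksClient.location_path(project_id, location)
    #   client.create_queue(parent=parent, queue=queue, metadata=None)

    # 2.x style (added above): string resource paths and a single request dict.
    parent = f"projects/{project_id}/locations/{location}"
    queue = tasks_v2.Queue(name=f"{parent}/queues/{queue_name}")
    request = {"parent": parent, "queue": queue}

    # client = tasks_v2.CloudTasksClient()
    # client.create_queue(request=request, retry=None, timeout=None, metadata=())
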
diff --git a/airflow/providers/google/cloud/operators/tasks.py b/airflow/providers/google/cloud/operators/tasks.py
index 6598d66..2834b32 100644
--- a/airflow/providers/google/cloud/operators/tasks.py
+++ b/airflow/providers/google/cloud/operators/tasks.py
@@ -25,9 +25,8 @@ from typing import Dict, Optional, Sequence, Tuple, Union
 
 from google.api_core.exceptions import AlreadyExists
 from google.api_core.retry import Retry
-from google.cloud.tasks_v2 import enums
-from google.cloud.tasks_v2.types import FieldMask, Queue, Task
-from google.protobuf.json_format import MessageToDict
+from google.cloud.tasks_v2.types import Queue, Task
+from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook
@@ -136,7 +135,7 @@ class CloudTasksQueueCreateOperator(BaseOperator):
                 metadata=self.metadata,
             )
 
-        return MessageToDict(queue)
+        return Queue.to_dict(queue)
 
 
 class CloudTasksQueueUpdateOperator(BaseOperator):
@@ -159,7 +158,7 @@ class CloudTasksQueueUpdateOperator(BaseOperator):
     :param update_mask: A mask used to specify which fields of the queue are being updated.
         If empty, then all fields will be updated.
         If a dict is provided, it must be of the same form as the protobuf message.
-    :type update_mask: dict or google.cloud.tasks_v2.types.FieldMask
+    :type update_mask: dict or google.protobuf.field_mask_pb2.FieldMask
     :param retry: (Optional) A retry object used to retry requests.
         If None is specified, requests will not be retried.
     :type retry: google.api_core.retry.Retry
@@ -237,7 +236,7 @@ class CloudTasksQueueUpdateOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(queue)
+        return Queue.to_dict(queue)
 
 
 class CloudTasksQueueGetOperator(BaseOperator):
@@ -320,7 +319,7 @@ class CloudTasksQueueGetOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(queue)
+        return Queue.to_dict(queue)
 
 
 class CloudTasksQueuesListOperator(BaseOperator):
@@ -408,7 +407,7 @@ class CloudTasksQueuesListOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return [MessageToDict(q) for q in queues]
+        return [Queue.to_dict(q) for q in queues]
 
 
 class CloudTasksQueueDeleteOperator(BaseOperator):
@@ -571,7 +570,7 @@ class CloudTasksQueuePurgeOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(queue)
+        return Queue.to_dict(queue)
 
 
 class CloudTasksQueuePauseOperator(BaseOperator):
@@ -646,7 +645,7 @@ class CloudTasksQueuePauseOperator(BaseOperator):
             gcp_conn_id=self.gcp_conn_id,
             impersonation_chain=self.impersonation_chain,
         )
-        queues = hook.pause_queue(
+        queue = hook.pause_queue(
             location=self.location,
             queue_name=self.queue_name,
             project_id=self.project_id,
@@ -654,7 +653,7 @@ class CloudTasksQueuePauseOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return [MessageToDict(q) for q in queues]
+        return Queue.to_dict(queue)
 
 
 class CloudTasksQueueResumeOperator(BaseOperator):
@@ -737,7 +736,7 @@ class CloudTasksQueueResumeOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(queue)
+        return Queue.to_dict(queue)
 
 
 class CloudTasksTaskCreateOperator(BaseOperator):
@@ -803,7 +802,7 @@ class CloudTasksTaskCreateOperator(BaseOperator):
         task: Union[Dict, Task],
         project_id: Optional[str] = None,
         task_name: Optional[str] = None,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
@@ -840,7 +839,7 @@ class CloudTasksTaskCreateOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(task)
+        return Task.to_dict(task)
 
 
 class CloudTasksTaskGetOperator(BaseOperator):
@@ -900,7 +899,7 @@ class CloudTasksTaskGetOperator(BaseOperator):
         queue_name: str,
         task_name: str,
         project_id: Optional[str] = None,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
@@ -935,7 +934,7 @@ class CloudTasksTaskGetOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(task)
+        return Task.to_dict(task)
 
 
 class CloudTasksTasksListOperator(BaseOperator):
@@ -994,7 +993,7 @@ class CloudTasksTasksListOperator(BaseOperator):
         location: str,
         queue_name: str,
         project_id: Optional[str] = None,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         page_size: Optional[int] = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
@@ -1030,7 +1029,7 @@ class CloudTasksTasksListOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return [MessageToDict(t) for t in tasks]
+        return [Task.to_dict(t) for t in tasks]
 
 
 class CloudTasksTaskDeleteOperator(BaseOperator):
@@ -1134,7 +1133,7 @@ class CloudTasksTaskRunOperator(BaseOperator):
     :type project_id: str
     :param response_view: (Optional) This field specifies which subset of the Task will
         be returned.
-    :type response_view: google.cloud.tasks_v2.enums.Task.View
+    :type response_view: google.cloud.tasks_v2.Task.View
     :param retry: (Optional) A retry object used to retry requests.
         If None is specified, requests will not be retried.
     :type retry: google.api_core.retry.Retry
@@ -1176,7 +1175,7 @@ class CloudTasksTaskRunOperator(BaseOperator):
         queue_name: str,
         task_name: str,
         project_id: Optional[str] = None,
-        response_view: Optional[enums.Task.View] = None,
+        response_view: Optional = None,
         retry: Optional[Retry] = None,
         timeout: Optional[float] = None,
         metadata: Optional[MetaData] = None,
@@ -1211,4 +1210,4 @@ class CloudTasksTaskRunOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        return MessageToDict(task)
+        return Task.to_dict(task)
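
The operator changes above replace google.protobuf.json_format.MessageToDict with the to_dict() helper on the proto-plus message classes, so execute() keeps returning plain dicts (which Airflow can push to XCom). A short illustrative sketch, assuming google-cloud-tasks>=2.0.0 and a placeholder queue name:

    from google.cloud import tasks_v2

    queue = tasks_v2.Queue(name="projects/my-project/locations/us-central1/queues/my-queue")

    # 2.x responses are proto-plus messages; the class-level helper converts them.
    queue_dict = tasks_v2.Queue.to_dict(queue)
    print(queue_dict["name"])
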
diff --git a/setup.py b/setup.py
index ff9e65d..520b059 100644
--- a/setup.py
+++ b/setup.py
@@ -302,7 +302,7 @@ google = [
     'google-cloud-spanner>=1.10.0,<2.0.0',
     'google-cloud-speech>=0.36.3,<2.0.0',
     'google-cloud-storage>=1.30,<2.0.0',
-    'google-cloud-tasks>=1.2.1,<2.0.0',
+    'google-cloud-tasks>=2.0.0,<3.0.0',
     'google-cloud-texttospeech>=0.4.0,<2.0.0',
     'google-cloud-translate>=1.5.0,<2.0.0',
     'google-cloud-videointelligence>=1.7.0,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_tasks.py b/tests/providers/google/cloud/hooks/test_tasks.py
index 8be6686..6504595 100644
--- a/tests/providers/google/cloud/hooks/test_tasks.py
+++ b/tests/providers/google/cloud/hooks/test_tasks.py
@@ -72,11 +72,10 @@ class TestCloudTasksHook(unittest.TestCase):
         self.assertIs(result, API_RESPONSE)
 
         get_conn.return_value.create_queue.assert_called_once_with(
-            parent=FULL_LOCATION_PATH,
-            queue=Queue(name=FULL_QUEUE_PATH),
+            request=dict(parent=FULL_LOCATION_PATH, queue=Queue(name=FULL_QUEUE_PATH)),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -94,11 +93,10 @@ class TestCloudTasksHook(unittest.TestCase):
         self.assertIs(result, API_RESPONSE)
 
         get_conn.return_value.update_queue.assert_called_once_with(
-            queue=Queue(name=FULL_QUEUE_PATH, state=3),
-            update_mask=None,
+            request=dict(queue=Queue(name=FULL_QUEUE_PATH, state=3), update_mask=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
@@ -111,30 +109,28 @@ class TestCloudTasksHook(unittest.TestCase):
         self.assertIs(result, API_RESPONSE)
 
         get_conn.return_value.get_queue.assert_called_once_with(
-            name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.list_queues.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.list_queues.return_value": [Queue(name=FULL_QUEUE_PATH)]},  # type: ignore
     )
     def test_list_queues(self, get_conn):
         result = self.hook.list_queues(location=LOCATION, project_id=PROJECT_ID)
 
-        self.assertEqual(result, list(API_RESPONSE))
+        self.assertEqual(result, [Queue(name=FULL_QUEUE_PATH)])
 
         get_conn.return_value.list_queues.assert_called_once_with(
-            parent=FULL_LOCATION_PATH,
-            filter_=None,
-            page_size=None,
+            request=dict(parent=FULL_LOCATION_PATH, filter=None, page_size=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.delete_queue.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.delete_queue.return_value": None},  # type: ignore
     )
     def test_delete_queue(self, get_conn):
         result = self.hook.delete_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
@@ -142,51 +138,51 @@ class TestCloudTasksHook(unittest.TestCase):
         self.assertEqual(result, None)
 
         get_conn.return_value.delete_queue.assert_called_once_with(
-            name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.purge_queue.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.purge_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
     )
     def test_purge_queue(self, get_conn):
         result = self.hook.purge_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
 
         get_conn.return_value.purge_queue.assert_called_once_with(
-            name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.pause_queue.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.pause_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
     )
     def test_pause_queue(self, get_conn):
         result = self.hook.pause_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
 
         get_conn.return_value.pause_queue.assert_called_once_with(
-            name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.resume_queue.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.resume_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
     )
     def test_resume_queue(self, get_conn):
         result = self.hook.resume_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
 
         get_conn.return_value.resume_queue.assert_called_once_with(
-            name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.create_task.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.create_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
     )
     def test_create_task(self, get_conn):
         result = self.hook.create_task(
@@ -197,20 +193,18 @@ class TestCloudTasksHook(unittest.TestCase):
             task_name=TASK_NAME,
         )
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Task(name=FULL_TASK_PATH))
 
         get_conn.return_value.create_task.assert_called_once_with(
-            parent=FULL_QUEUE_PATH,
-            task=Task(name=FULL_TASK_PATH),
-            response_view=None,
+            request=dict(parent=FULL_QUEUE_PATH, task=Task(name=FULL_TASK_PATH), response_view=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.get_task.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.get_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
     )
     def test_get_task(self, get_conn):
         result = self.hook.get_task(
@@ -220,37 +214,34 @@ class TestCloudTasksHook(unittest.TestCase):
             project_id=PROJECT_ID,
         )
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Task(name=FULL_TASK_PATH))
 
         get_conn.return_value.get_task.assert_called_once_with(
-            name=FULL_TASK_PATH,
-            response_view=None,
+            request=dict(name=FULL_TASK_PATH, response_view=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.list_tasks.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.list_tasks.return_value": [Task(name=FULL_TASK_PATH)]},  # type: ignore
     )
     def test_list_tasks(self, get_conn):
         result = self.hook.list_tasks(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
 
-        self.assertEqual(result, list(API_RESPONSE))
+        self.assertEqual(result, [Task(name=FULL_TASK_PATH)])
 
         get_conn.return_value.list_tasks.assert_called_once_with(
-            parent=FULL_QUEUE_PATH,
-            response_view=None,
-            page_size=None,
+            request=dict(parent=FULL_QUEUE_PATH, response_view=None, page_size=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.delete_task.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.delete_task.return_value": None},  # type: ignore
     )
     def test_delete_task(self, get_conn):
         result = self.hook.delete_task(
@@ -263,12 +254,12 @@ class TestCloudTasksHook(unittest.TestCase):
         self.assertEqual(result, None)
 
         get_conn.return_value.delete_task.assert_called_once_with(
-            name=FULL_TASK_PATH, retry=None, timeout=None, metadata=None
+            request=dict(name=FULL_TASK_PATH), retry=None, timeout=None, metadata=()
         )
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
-        **{"return_value.run_task.return_value": API_RESPONSE},  # type: ignore
+        **{"return_value.run_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
     )
     def test_run_task(self, get_conn):
         result = self.hook.run_task(
@@ -278,12 +269,11 @@ class TestCloudTasksHook(unittest.TestCase):
             project_id=PROJECT_ID,
         )
 
-        self.assertEqual(result, API_RESPONSE)
+        self.assertEqual(result, Task(name=FULL_TASK_PATH))
 
         get_conn.return_value.run_task.assert_called_once_with(
-            name=FULL_TASK_PATH,
-            response_view=None,
+            request=dict(name=FULL_TASK_PATH, response_view=None),
             retry=None,
             timeout=None,
-            metadata=None,
+            metadata=(),
         )
diff --git a/tests/providers/google/cloud/operators/test_tasks.py b/tests/providers/google/cloud/operators/test_tasks.py
index cac1441..ed76911 100644
--- a/tests/providers/google/cloud/operators/test_tasks.py
+++ b/tests/providers/google/cloud/operators/test_tasks.py
@@ -45,21 +45,26 @@ QUEUE_ID = "test-queue"
 FULL_QUEUE_PATH = "projects/test-project/locations/asia-east2/queues/test-queue"
 TASK_NAME = "test-task"
 FULL_TASK_PATH = "projects/test-project/locations/asia-east2/queues/test-queue/tasks/test-task"
+TEST_QUEUE = Queue(name=FULL_QUEUE_PATH)
+TEST_TASK = Task(app_engine_http_request={})
 
 
 class TestCloudTasksQueueCreate(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_create_queue(self, mock_hook):
-        mock_hook.return_value.create_queue.return_value = mock.MagicMock()
-        operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=Queue(), task_id="id")
-        operator.execute(context=None)
+        mock_hook.return_value.create_queue.return_value = TEST_QUEUE
+        operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=TEST_QUEUE, task_id="id")
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
         )
         mock_hook.return_value.create_queue.assert_called_once_with(
             location=LOCATION,
-            task_queue=Queue(),
+            task_queue=TEST_QUEUE,
             project_id=None,
             queue_name=None,
             retry=None,
@@ -71,9 +76,12 @@ class TestCloudTasksQueueCreate(unittest.TestCase):
 class TestCloudTasksQueueUpdate(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_update_queue(self, mock_hook):
-        mock_hook.return_value.update_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.update_queue.return_value = TEST_QUEUE
         operator = CloudTasksQueueUpdateOperator(task_queue=Queue(name=FULL_QUEUE_PATH), task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -93,9 +101,12 @@ class TestCloudTasksQueueUpdate(unittest.TestCase):
 class TestCloudTasksQueueGet(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_get_queue(self, mock_hook):
-        mock_hook.return_value.get_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.get_queue.return_value = TEST_QUEUE
         operator = CloudTasksQueueGetOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -113,9 +124,12 @@ class TestCloudTasksQueueGet(unittest.TestCase):
 class TestCloudTasksQueuesList(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_list_queues(self, mock_hook):
-        mock_hook.return_value.list_queues.return_value = mock.MagicMock()
+        mock_hook.return_value.list_queues.return_value = [TEST_QUEUE]
         operator = CloudTasksQueuesListOperator(location=LOCATION, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual([{'name': FULL_QUEUE_PATH, 'state': 0}], result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -134,9 +148,12 @@ class TestCloudTasksQueuesList(unittest.TestCase):
 class TestCloudTasksQueueDelete(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_delete_queue(self, mock_hook):
-        mock_hook.return_value.delete_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.delete_queue.return_value = None
         operator = CloudTasksQueueDeleteOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(None, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -154,9 +171,12 @@ class TestCloudTasksQueueDelete(unittest.TestCase):
 class TestCloudTasksQueuePurge(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_delete_queue(self, mock_hook):
-        mock_hook.return_value.purge_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.purge_queue.return_value = TEST_QUEUE
         operator = CloudTasksQueuePurgeOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -174,9 +194,12 @@ class TestCloudTasksQueuePurge(unittest.TestCase):
 class TestCloudTasksQueuePause(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_pause_queue(self, mock_hook):
-        mock_hook.return_value.pause_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.pause_queue.return_value = TEST_QUEUE
         operator = CloudTasksQueuePauseOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -194,9 +217,12 @@ class TestCloudTasksQueuePause(unittest.TestCase):
 class TestCloudTasksQueueResume(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_resume_queue(self, mock_hook):
-        mock_hook.return_value.resume_queue.return_value = mock.MagicMock()
+        mock_hook.return_value.resume_queue.return_value = TEST_QUEUE
         operator = CloudTasksQueueResumeOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -214,11 +240,23 @@ class TestCloudTasksQueueResume(unittest.TestCase):
 class TestCloudTasksTaskCreate(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_create_task(self, mock_hook):
-        mock_hook.return_value.create_task.return_value = mock.MagicMock()
+        mock_hook.return_value.create_task.return_value = TEST_TASK
         operator = CloudTasksTaskCreateOperator(
             location=LOCATION, queue_name=QUEUE_ID, task=Task(), task_id="id"
         )
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(
+            {
+                'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''},
+                'dispatch_count': 0,
+                'name': '',
+                'response_count': 0,
+                'view': 0,
+            },
+            result,
+        )
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -239,11 +277,23 @@ class TestCloudTasksTaskCreate(unittest.TestCase):
 class TestCloudTasksTaskGet(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_get_task(self, mock_hook):
-        mock_hook.return_value.get_task.return_value = mock.MagicMock()
+        mock_hook.return_value.get_task.return_value = TEST_TASK
         operator = CloudTasksTaskGetOperator(
             location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id"
         )
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(
+            {
+                'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''},
+                'dispatch_count': 0,
+                'name': '',
+                'response_count': 0,
+                'view': 0,
+            },
+            result,
+        )
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -263,9 +313,28 @@ class TestCloudTasksTaskGet(unittest.TestCase):
 class TestCloudTasksTasksList(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_list_tasks(self, mock_hook):
-        mock_hook.return_value.list_tasks.return_value = mock.MagicMock()
+        mock_hook.return_value.list_tasks.return_value = [TEST_TASK]
         operator = CloudTasksTasksListOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id")
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(
+            [
+                {
+                    'app_engine_http_request': {
+                        'body': '',
+                        'headers': {},
+                        'http_method': 0,
+                        'relative_uri': '',
+                    },
+                    'dispatch_count': 0,
+                    'name': '',
+                    'response_count': 0,
+                    'view': 0,
+                }
+            ],
+            result,
+        )
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -285,11 +354,14 @@ class TestCloudTasksTasksList(unittest.TestCase):
 class TestCloudTasksTaskDelete(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_delete_task(self, mock_hook):
-        mock_hook.return_value.delete_task.return_value = mock.MagicMock()
+        mock_hook.return_value.delete_task.return_value = None
         operator = CloudTasksTaskDeleteOperator(
             location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id"
         )
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(None, result)
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
@@ -308,11 +380,23 @@ class TestCloudTasksTaskDelete(unittest.TestCase):
 class TestCloudTasksTaskRun(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
     def test_run_task(self, mock_hook):
-        mock_hook.return_value.run_task.return_value = mock.MagicMock()
+        mock_hook.return_value.run_task.return_value = TEST_TASK
         operator = CloudTasksTaskRunOperator(
             location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id"
         )
-        operator.execute(context=None)
+
+        result = operator.execute(context=None)
+
+        self.assertEqual(
+            {
+                'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''},
+                'dispatch_count': 0,
+                'name': '',
+                'response_count': 0,
+                'view': 0,
+            },
+            result,
+        )
         mock_hook.assert_called_once_with(
             gcp_conn_id=GCP_CONN_ID,
             impersonation_chain=None,
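
For readers following the google-cloud-tasks upgrade above, here is a minimal
sketch of the 2.x client call style the updated hook relies on: arguments are
wrapped in a single request mapping, metadata defaults to an empty tuple, and
results are serialized with Task.to_dict() instead of MessageToDict(). The
resource names are placeholders and the snippet is illustrative only, not code
from the commit.

    from google.cloud import tasks_v2

    # Sketch only: requires real GCP credentials and an existing queue.
    client = tasks_v2.CloudTasksClient()
    parent = "projects/example-project/locations/us-central1/queues/example-queue"

    tasks = client.list_tasks(
        request={"parent": parent, "page_size": 10},
        retry=None,
        timeout=None,
        metadata=(),
    )
    # Proto-plus messages are converted with Task.to_dict() rather than
    # google.protobuf.json_format.MessageToDict().
    task_dicts = [tasks_v2.Task.to_dict(task) for task in tasks]
    print(task_dicts)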


[airflow] 07/41: Update documents for using MySQL (#14174)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 400e078ff4fd523354bc0a7fe664bbece324e5fa
Author: Gabriel <ga...@gmail.com>
AuthorDate: Fri Feb 12 01:09:20 2021 +0800

    Update documents for using MySQL (#14174)
    
    Co-authored-by: Jarek Potiuk <ja...@potiuk.com>
    Co-authored-by: Kamil Breguła <mi...@users.noreply.github.com>
    (cherry picked from commit a3b9f1e489129c197f1cab1b01768b1affe08d2e)
---
 docs/apache-airflow/howto/set-up-database.rst | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst
index b13fdc4..3afdff1 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -67,7 +67,7 @@ In the example below, a database ``airflow_db`` and user  with username ``airflo
 
 .. code-block:: sql
 
-   CREATE DATABASE airflow_db CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+   CREATE DATABASE airflow_db CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
    CREATE USER 'airflow_user' IDENTIFIED BY 'airflow_pass';
    GRANT ALL PRIVILEGES ON airflow_db.* TO 'airflow_user';
 
@@ -91,6 +91,8 @@ without any cert options provided.
 However if you want to use other drivers visit the `MySQL Dialect <https://docs.sqlalchemy.org/en/13/dialects/mysql.html>`__  in SQLAlchemy documentation for more information regarding download
 and setup of the SqlAlchemy connection.
 
+In addition, you should pay particular attention to MySQL's character set and collation. Although ``utf8mb4`` is an increasingly popular character set for MySQL (and is the default as of MySQL 8.0), using ``utf8mb4`` requires an additional setting in Airflow 2+ (see `#7570 <https://github.com/apache/airflow/pull/7570>`__ for details). If you use ``utf8mb4`` as the character set, you should also set ``sql_engine_collation_for_ids=utf8mb3_general_ci``.
+
 Setting up a PostgreSQL Database
 --------------------------------
 

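As a hedged illustration of the collation note added above (the connection URI
and variable names are assumptions drawn from the paragraph, not part of the
commit), the two related settings can be supplied through Airflow's standard
environment-variable overrides:

    import os

    # Illustrative only: point Airflow at a utf8mb4 MySQL metadata database and
    # add the collation override for ID columns mentioned above.
    os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = (
        "mysql+mysqldb://airflow_user:airflow_pass@localhost:3306/airflow_db"
    )
    os.environ["AIRFLOW__CORE__SQL_ENGINE_COLLATION_FOR_IDS"] = "utf8mb3_general_ci"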

[airflow] 35/41: Remove testfixtures module that is only used once (#14318)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ad50ff27d7b2aef7c406620706675c6e1b494bcb
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Mon Feb 22 20:17:31 2021 +0000

    Remove testfixtures module that is only used once (#14318)
    
    This is only used in a single test; everywhere else we use pytest or
    unittest's built-in features.
    
    (cherry picked from commit 3a046faaeb457572b1484faf158cc96eb81df44a)
---
 setup.py                                         |  1 -
 tests/providers/amazon/aws/hooks/test_glacier.py | 65 ++++++++++--------------
 2 files changed, 27 insertions(+), 39 deletions(-)

diff --git a/setup.py b/setup.py
index 92eb113..ad4fdd5 100644
--- a/setup.py
+++ b/setup.py
@@ -508,7 +508,6 @@ devel = [
     'pywinrm',
     'qds-sdk>=1.9.6',
     'requests_mock',
-    'testfixtures',
     'wheel',
     'yamllint',
 ]
diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py b/tests/providers/amazon/aws/hooks/test_glacier.py
index c1c86a5..c22620f 100644
--- a/tests/providers/amazon/aws/hooks/test_glacier.py
+++ b/tests/providers/amazon/aws/hooks/test_glacier.py
@@ -19,8 +19,6 @@
 import unittest
 from unittest import mock
 
-from testfixtures import LogCapture
-
 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
 
 CREDENTIALS = "aws_conn"
@@ -52,26 +50,20 @@ class TestAmazonGlacierHook(unittest.TestCase):
         # given
         job_id = {"jobId": "1234abcd"}
         # when
-        with LogCapture() as log:
+        with self.assertLogs() as log:
             mock_conn.return_value.initiate_job.return_value = job_id
             self.hook.retrieve_inventory(VAULT_NAME)
             # then
-            log.check(
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Retrieving inventory for vault: {VAULT_NAME}",
-                ),
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Initiated inventory-retrieval job for: {VAULT_NAME}",
-                ),
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Retrieval Job ID: {job_id.get('jobId')}",
-                ),
+            self.assertEqual(
+                log.output,
+                [
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Retrieving inventory for vault: {VAULT_NAME}",
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Initiated inventory-retrieval job for: {VAULT_NAME}",
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Retrieval Job ID: {job_id.get('jobId')}",
+                ],
             )
 
     @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
@@ -86,16 +78,16 @@ class TestAmazonGlacierHook(unittest.TestCase):
     @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
     def test_retrieve_inventory_results_should_log_mgs(self, mock_conn):
         # when
-        with LogCapture() as log:
+        with self.assertLogs() as log:
             mock_conn.return_value.get_job_output.return_value = REQUEST_RESULT
             self.hook.retrieve_inventory_results(VAULT_NAME, JOB_ID)
             # then
-            log.check(
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Retrieving the job results for vault: {VAULT_NAME}...",
-                ),
+            self.assertEqual(
+                log.output,
+                [
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Retrieving the job results for vault: {VAULT_NAME}...",
+                ],
             )
 
     @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
@@ -110,19 +102,16 @@ class TestAmazonGlacierHook(unittest.TestCase):
     @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
     def test_describe_job_should_log_mgs(self, mock_conn):
         # when
-        with LogCapture() as log:
+        with self.assertLogs() as log:
             mock_conn.return_value.describe_job.return_value = JOB_STATUS
             self.hook.describe_job(VAULT_NAME, JOB_ID)
             # then
-            log.check(
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
-                ),
-                (
-                    'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
-                    'INFO',
-                    f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
-                ),
+            self.assertEqual(
+                log.output,
+                [
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
+                    'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
+                    + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
+                ],
             )
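
A brief, self-contained sketch (illustrative only, with a made-up logger name)
of why the expected strings above take the form LEVEL:logger.name:message:
unittest's assertLogs renders each captured record that way in its output list.

    import logging
    import unittest

    class TestAssertLogsFormat(unittest.TestCase):
        def test_output_format(self):
            logger = logging.getLogger("airflow.example")
            with self.assertLogs(logger, level="INFO") as log:
                logger.info("Retrieving inventory for vault: %s", "my-vault")
            # assertLogs formats records as "LEVEL:logger.name:message".
            self.assertEqual(
                log.output,
                ["INFO:airflow.example:Retrieving inventory for vault: my-vault"],
            )

    if __name__ == "__main__":
        unittest.main()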


[airflow] 40/41: Fix failing docs build on Master (#14465)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 1bb177dbc27ec4e252862359906b21be4e7d3892
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Thu Feb 25 18:56:55 2021 +0000

    Fix failing docs build on Master (#14465)
    
    https://github.com/apache/airflow/pull/14030 caused this issue
    (cherry picked from commit 4455f14732c207ec213703b8b8c68efeb8b6aebe)
---
 docs/apache-airflow-providers-tableau/index.rst | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst
index 47ace94..ce74925 100644
--- a/docs/apache-airflow-providers-tableau/index.rst
+++ b/docs/apache-airflow-providers-tableau/index.rst
@@ -24,12 +24,6 @@ Content
 
 .. toctree::
     :maxdepth: 1
-    :caption: Guides
-
-    Connection types <connections/tableau>
-
-.. toctree::
-    :maxdepth: 1
     :caption: References
 
     Python API <_api/airflow/providers/tableau/index>


[airflow] 24/41: Support google-cloud-bigquery-datatransfer>=3.0.0 (#13337)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ce5c00fbed5d7179c15059916758217f3c9d8f51
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Dec 31 18:07:32 2020 +0100

    Support google-cloud-bigquery-datatransfer>=3.0.0 (#13337)
    
    (cherry picked from commit 9de71270838ad3cc59043f1ab0bb6ca97af13622)
---
 airflow/providers/google/ADDITIONAL_INFO.md        |  1 +
 .../cloud/example_dags/example_bigquery_dts.py     | 20 ++++------
 .../providers/google/cloud/hooks/bigquery_dts.py   | 45 ++++++++++++++--------
 .../google/cloud/operators/bigquery_dts.py         | 12 +++---
 .../providers/google/cloud/sensors/bigquery_dts.py | 35 ++++++++++++-----
 setup.py                                           |  2 +-
 .../google/cloud/hooks/test_bigquery_dts.py        | 39 ++++++++-----------
 .../google/cloud/operators/test_bigquery_dts.py    | 37 +++++++++++++-----
 .../google/cloud/sensors/test_bigquery_dts.py      | 39 ++++++++++++++++---
 9 files changed, 142 insertions(+), 88 deletions(-)

diff --git a/airflow/providers/google/ADDITIONAL_INFO.md b/airflow/providers/google/ADDITIONAL_INFO.md
index b54b240..eca05df 100644
--- a/airflow/providers/google/ADDITIONAL_INFO.md
+++ b/airflow/providers/google/ADDITIONAL_INFO.md
@@ -29,6 +29,7 @@ Details are covered in the UPDATING.md files for each library, but there are som
 
 | Library name | Previous constraints | Current constraints | |
 | --- | --- | --- | --- |
+| [``google-cloud-bigquery-datatransfer``](https://pypi.org/project/google-cloud-bigquery-datatransfer/) | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/UPGRADING.md) |
 | [``google-cloud-datacatalog``](https://pypi.org/project/google-cloud-datacatalog/) | ``>=0.5.0,<0.8`` | ``>=1.0.0,<2.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-datacatalog/blob/master/UPGRADING.md) |
 | [``google-cloud-os-login``](https://pypi.org/project/google-cloud-os-login/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-oslogin/blob/master/UPGRADING.md) |
 | [``google-cloud-pubsub``](https://pypi.org/project/google-cloud-pubsub/) | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0``  | [`UPGRADING.md`](https://github.com/googleapis/python-pubsub/blob/master/UPGRADING.md) |
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
index 260dc5d..da13c9d 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
@@ -22,9 +22,6 @@ Example Airflow DAG that creates and deletes Bigquery data transfer configuratio
 import os
 import time
 
-from google.cloud.bigquery_datatransfer_v1.types import TransferConfig
-from google.protobuf.json_format import ParseDict
-
 from airflow import models
 from airflow.providers.google.cloud.operators.bigquery_dts import (
     BigQueryCreateDataTransferOperator,
@@ -55,16 +52,13 @@ PARAMS = {
     "file_format": "CSV",
 }
 
-TRANSFER_CONFIG = ParseDict(
-    {
-        "destination_dataset_id": GCP_DTS_BQ_DATASET,
-        "display_name": "GCS Test Config",
-        "data_source_id": "google_cloud_storage",
-        "schedule_options": schedule_options,
-        "params": PARAMS,
-    },
-    TransferConfig(),
-)
+TRANSFER_CONFIG = {
+    "destination_dataset_id": GCP_DTS_BQ_DATASET,
+    "display_name": "GCS Test Config",
+    "data_source_id": "google_cloud_storage",
+    "schedule_options": schedule_options,
+    "params": PARAMS,
+}
 
 # [END howto_bigquery_dts_create_args]
 
diff --git a/airflow/providers/google/cloud/hooks/bigquery_dts.py b/airflow/providers/google/cloud/hooks/bigquery_dts.py
index 2d8d12b..37d42ef 100644
--- a/airflow/providers/google/cloud/hooks/bigquery_dts.py
+++ b/airflow/providers/google/cloud/hooks/bigquery_dts.py
@@ -27,7 +27,6 @@ from google.cloud.bigquery_datatransfer_v1.types import (
     TransferConfig,
     TransferRun,
 )
-from google.protobuf.json_format import MessageToDict, ParseDict
 from googleapiclient.discovery import Resource
 
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
@@ -71,7 +70,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         :param config: Data transfer configuration to create.
         :type config: Union[dict, google.cloud.bigquery_datatransfer_v1.types.TransferConfig]
         """
-        config = MessageToDict(config) if isinstance(config, TransferConfig) else config
+        config = TransferConfig.to_dict(config) if isinstance(config, TransferConfig) else config
         new_config = copy(config)
         schedule_options = new_config.get("schedule_options")
         if schedule_options:
@@ -80,7 +79,11 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
                 schedule_options["disable_auto_scheduling"] = True
         else:
             new_config["schedule_options"] = {"disable_auto_scheduling": True}
-        return ParseDict(new_config, TransferConfig())
+        # HACK: TransferConfig.to_dict returns invalid representation
+        # See: https://github.com/googleapis/python-bigquery-datatransfer/issues/90
+        if isinstance(new_config.get('user_id'), str):
+            new_config['user_id'] = int(new_config['user_id'])
+        return TransferConfig(**new_config)
 
     def get_conn(self) -> DataTransferServiceClient:
         """
@@ -129,14 +132,16 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         :return: A ``google.cloud.bigquery_datatransfer_v1.types.TransferConfig`` instance.
         """
         client = self.get_conn()
-        parent = client.project_path(project_id)
+        parent = f"projects/{project_id}"
         return client.create_transfer_config(
-            parent=parent,
-            transfer_config=self._disable_auto_scheduling(transfer_config),
-            authorization_code=authorization_code,
+            request={
+                'parent': parent,
+                'transfer_config': self._disable_auto_scheduling(transfer_config),
+                'authorization_code': authorization_code,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -169,8 +174,10 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         :return: None
         """
         client = self.get_conn()
-        name = client.project_transfer_config_path(project=project_id, transfer_config=transfer_config_id)
-        return client.delete_transfer_config(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/transferConfigs/{transfer_config_id}"
+        return client.delete_transfer_config(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
 
     @GoogleBaseHook.fallback_to_default_project_id
     def start_manual_transfer_runs(
@@ -216,14 +223,16 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         :return: An ``google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse`` instance.
         """
         client = self.get_conn()
-        parent = client.project_transfer_config_path(project=project_id, transfer_config=transfer_config_id)
+        parent = f"projects/{project_id}/transferConfigs/{transfer_config_id}"
         return client.start_manual_transfer_runs(
-            parent=parent,
-            requested_time_range=requested_time_range,
-            requested_run_time=requested_run_time,
+            request={
+                'parent': parent,
+                'requested_time_range': requested_time_range,
+                'requested_run_time': requested_run_time,
+            },
             retry=retry,
             timeout=timeout,
-            metadata=metadata,
+            metadata=metadata or (),
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
@@ -259,5 +268,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         :return: An ``google.cloud.bigquery_datatransfer_v1.types.TransferRun`` instance.
         """
         client = self.get_conn()
-        name = client.project_run_path(project=project_id, transfer_config=transfer_config_id, run=run_id)
-        return client.get_transfer_run(name=name, retry=retry, timeout=timeout, metadata=metadata)
+        name = f"projects/{project_id}/transferConfigs/{transfer_config_id}/runs/{run_id}"
+        return client.get_transfer_run(
+            request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()
+        )
diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py
index e941bd4..656fc77 100644
--- a/airflow/providers/google/cloud/operators/bigquery_dts.py
+++ b/airflow/providers/google/cloud/operators/bigquery_dts.py
@@ -19,7 +19,7 @@
 from typing import Optional, Sequence, Tuple, Union
 
 from google.api_core.retry import Retry
-from google.protobuf.json_format import MessageToDict
+from google.cloud.bigquery_datatransfer_v1 import StartManualTransferRunsResponse, TransferConfig
 
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id
@@ -110,7 +110,7 @@ class BigQueryCreateDataTransferOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(response)
+        result = TransferConfig.to_dict(response)
         self.log.info("Created DTS transfer config %s", get_object_id(result))
         self.xcom_push(context, key="transfer_config_id", value=get_object_id(result))
         return result
@@ -289,10 +289,8 @@ class BigQueryDataTransferServiceStartTransferRunsOperator(BaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(response)
-        run_id = None
-        if 'runs' in result:
-            run_id = get_object_id(result['runs'][0])
-            self.xcom_push(context, key="run_id", value=run_id)
+        result = StartManualTransferRunsResponse.to_dict(response)
+        run_id = get_object_id(result['runs'][0])
+        self.xcom_push(context, key="run_id", value=run_id)
         self.log.info('Transfer run %s submitted successfully.', run_id)
         return result
diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/airflow/providers/google/cloud/sensors/bigquery_dts.py
index 5b851ed..49e124c 100644
--- a/airflow/providers/google/cloud/sensors/bigquery_dts.py
+++ b/airflow/providers/google/cloud/sensors/bigquery_dts.py
@@ -19,7 +19,7 @@
 from typing import Optional, Sequence, Set, Tuple, Union
 
 from google.api_core.retry import Retry
-from google.protobuf.json_format import MessageToDict
+from google.cloud.bigquery_datatransfer_v1 import TransferState
 
 from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook
 from airflow.sensors.base import BaseSensorOperator
@@ -81,7 +81,9 @@ class BigQueryDataTransferServiceTransferRunSensor(BaseSensorOperator):
         *,
         run_id: str,
         transfer_config_id: str,
-        expected_statuses: Union[Set[str], str] = 'SUCCEEDED',
+        expected_statuses: Union[
+            Set[Union[str, TransferState, int]], str, TransferState, int
+        ] = TransferState.SUCCEEDED,
         project_id: Optional[str] = None,
         gcp_conn_id: str = "google_cloud_default",
         retry: Optional[Retry] = None,
@@ -96,13 +98,29 @@ class BigQueryDataTransferServiceTransferRunSensor(BaseSensorOperator):
         self.retry = retry
         self.request_timeout = request_timeout
         self.metadata = metadata
-        self.expected_statuses = (
-            {expected_statuses} if isinstance(expected_statuses, str) else expected_statuses
-        )
+        self.expected_statuses = self._normalize_state_list(expected_statuses)
         self.project_id = project_id
         self.gcp_cloud_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
+    def _normalize_state_list(self, states) -> Set[TransferState]:
+        states = {states} if isinstance(states, (str, TransferState, int)) else states
+        result = set()
+        for state in states:
+            if isinstance(state, str):
+                result.add(TransferState[state.upper()])
+            elif isinstance(state, int):
+                result.add(TransferState(state))
+            elif isinstance(state, TransferState):
+                result.add(state)
+            else:
+                raise TypeError(
+                    f"Unsupported type. "
+                    f"Expected: str, int, google.cloud.bigquery_datatransfer_v1.TransferState."
+                    f"Current type: {type(state)}"
+                )
+        return result
+
     def poke(self, context: dict) -> bool:
         hook = BiqQueryDataTransferServiceHook(
             gcp_conn_id=self.gcp_cloud_conn_id,
@@ -116,8 +134,5 @@ class BigQueryDataTransferServiceTransferRunSensor(BaseSensorOperator):
             timeout=self.request_timeout,
             metadata=self.metadata,
         )
-        result = MessageToDict(run)
-        state = result["state"]
-        self.log.info("Status of %s run: %s", self.run_id, state)
-
-        return state in self.expected_statuses
+        self.log.info("Status of %s run: %s", self.run_id, str(run.state))
+        return run.state in self.expected_statuses
diff --git a/setup.py b/setup.py
index 3df9e47..628ecd1 100644
--- a/setup.py
+++ b/setup.py
@@ -284,7 +284,7 @@ google = [
     'google-auth>=1.0.0,<2.0.0',
     'google-auth-httplib2>=0.0.1',
     'google-cloud-automl>=0.4.0,<2.0.0',
-    'google-cloud-bigquery-datatransfer>=0.4.0,<2.0.0',
+    'google-cloud-bigquery-datatransfer>=3.0.0,<4.0.0',
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
     'google-cloud-datacatalog>=1.0.0,<2.0.0',
diff --git a/tests/providers/google/cloud/hooks/test_bigquery_dts.py b/tests/providers/google/cloud/hooks/test_bigquery_dts.py
index 64ad79c..b53cb76 100644
--- a/tests/providers/google/cloud/hooks/test_bigquery_dts.py
+++ b/tests/providers/google/cloud/hooks/test_bigquery_dts.py
@@ -20,9 +20,7 @@ import unittest
 from copy import deepcopy
 from unittest import mock
 
-from google.cloud.bigquery_datatransfer_v1 import DataTransferServiceClient
 from google.cloud.bigquery_datatransfer_v1.types import TransferConfig
-from google.protobuf.json_format import ParseDict
 
 from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook
 from airflow.version import version
@@ -33,21 +31,18 @@ PROJECT_ID = "id"
 
 PARAMS = {
     "field_delimiter": ",",
-    "max_bad_records": "0",
-    "skip_leading_rows": "1",
+    "max_bad_records": 0,
+    "skip_leading_rows": 1,
     "data_path_template": "bucket",
     "destination_table_name_template": "name",
     "file_format": "CSV",
 }
 
-TRANSFER_CONFIG = ParseDict(
-    {
-        "destination_dataset_id": "dataset",
-        "display_name": "GCS Test Config",
-        "data_source_id": "google_cloud_storage",
-        "params": PARAMS,
-    },
-    TransferConfig(),
+TRANSFER_CONFIG = TransferConfig(
+    destination_dataset_id="dataset",
+    display_name="GCS Test Config",
+    data_source_id="google_cloud_storage",
+    params=PARAMS,
 )
 
 TRANSFER_CONFIG_ID = "id1234"
@@ -77,14 +72,12 @@ class BigQueryDataTransferHookTestCase(unittest.TestCase):
     )
     def test_create_transfer_config(self, service_mock):
         self.hook.create_transfer_config(transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID)
-        parent = DataTransferServiceClient.project_path(PROJECT_ID)
+        parent = f"projects/{PROJECT_ID}"
         expected_config = deepcopy(TRANSFER_CONFIG)
         expected_config.schedule_options.disable_auto_scheduling = True
         service_mock.assert_called_once_with(
-            parent=parent,
-            transfer_config=expected_config,
-            authorization_code=None,
-            metadata=None,
+            request=dict(parent=parent, transfer_config=expected_config, authorization_code=None),
+            metadata=(),
             retry=None,
             timeout=None,
         )
@@ -96,8 +89,8 @@ class BigQueryDataTransferHookTestCase(unittest.TestCase):
     def test_delete_transfer_config(self, service_mock):
         self.hook.delete_transfer_config(transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID)
 
-        name = DataTransferServiceClient.project_transfer_config_path(PROJECT_ID, TRANSFER_CONFIG_ID)
-        service_mock.assert_called_once_with(name=name, metadata=None, retry=None, timeout=None)
+        name = f"projects/{PROJECT_ID}/transferConfigs/{TRANSFER_CONFIG_ID}"
+        service_mock.assert_called_once_with(request=dict(name=name), metadata=(), retry=None, timeout=None)
 
     @mock.patch(
         "airflow.providers.google.cloud.hooks.bigquery_dts."
@@ -106,12 +99,10 @@ class BigQueryDataTransferHookTestCase(unittest.TestCase):
     def test_start_manual_transfer_runs(self, service_mock):
         self.hook.start_manual_transfer_runs(transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID)
 
-        parent = DataTransferServiceClient.project_transfer_config_path(PROJECT_ID, TRANSFER_CONFIG_ID)
+        parent = f"projects/{PROJECT_ID}/transferConfigs/{TRANSFER_CONFIG_ID}"
         service_mock.assert_called_once_with(
-            parent=parent,
-            requested_time_range=None,
-            requested_run_time=None,
-            metadata=None,
+            request=dict(parent=parent, requested_time_range=None, requested_run_time=None),
+            metadata=(),
             retry=None,
             timeout=None,
         )
diff --git a/tests/providers/google/cloud/operators/test_bigquery_dts.py b/tests/providers/google/cloud/operators/test_bigquery_dts.py
index 4d42352..d6071fa 100644
--- a/tests/providers/google/cloud/operators/test_bigquery_dts.py
+++ b/tests/providers/google/cloud/operators/test_bigquery_dts.py
@@ -18,6 +18,8 @@
 import unittest
 from unittest import mock
 
+from google.cloud.bigquery_datatransfer_v1 import StartManualTransferRunsResponse, TransferConfig, TransferRun
+
 from airflow.providers.google.cloud.operators.bigquery_dts import (
     BigQueryCreateDataTransferOperator,
     BigQueryDataTransferServiceStartTransferRunsOperator,
@@ -39,20 +41,23 @@ TRANSFER_CONFIG = {
 
 TRANSFER_CONFIG_ID = "id1234"
 
-NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c"
+TRANSFER_CONFIG_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c"
+RUN_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c/runs/123"
 
 
 class BigQueryCreateDataTransferOperatorTestCase(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook")
-    @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.get_object_id")
-    def test_execute(self, mock_name, mock_hook):
-        mock_name.return_value = TRANSFER_CONFIG_ID
-        mock_xcom = mock.MagicMock()
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook",
+        **{'return_value.create_transfer_config.return_value': TransferConfig(name=TRANSFER_CONFIG_NAME)},
+    )
+    def test_execute(self, mock_hook):
         op = BigQueryCreateDataTransferOperator(
             transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID, task_id="id"
         )
-        op.xcom_push = mock_xcom
-        op.execute(None)
+        ti = mock.MagicMock()
+
+        op.execute({'ti': ti})
+
         mock_hook.return_value.create_transfer_config.assert_called_once_with(
             authorization_code=None,
             metadata=None,
@@ -61,6 +66,7 @@ class BigQueryCreateDataTransferOperatorTestCase(unittest.TestCase):
             retry=None,
             timeout=None,
         )
+        ti.xcom_push.assert_called_once_with(execution_date=None, key='transfer_config_id', value='1a2b3c')
 
 
 class BigQueryDeleteDataTransferConfigOperatorTestCase(unittest.TestCase):
@@ -80,12 +86,22 @@ class BigQueryDeleteDataTransferConfigOperatorTestCase(unittest.TestCase):
 
 
 class BigQueryDataTransferServiceStartTransferRunsOperatorTestCase(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook")
+    @mock.patch(
+        "airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook",
+        **{
+            'return_value.start_manual_transfer_runs.return_value': StartManualTransferRunsResponse(
+                runs=[TransferRun(name=RUN_NAME)]
+            )
+        },
+    )
     def test_execute(self, mock_hook):
         op = BigQueryDataTransferServiceStartTransferRunsOperator(
             transfer_config_id=TRANSFER_CONFIG_ID, task_id="id", project_id=PROJECT_ID
         )
-        op.execute(None)
+        ti = mock.MagicMock()
+
+        op.execute({'ti': ti})
+
         mock_hook.return_value.start_manual_transfer_runs.assert_called_once_with(
             transfer_config_id=TRANSFER_CONFIG_ID,
             project_id=PROJECT_ID,
@@ -95,3 +111,4 @@ class BigQueryDataTransferServiceStartTransferRunsOperatorTestCase(unittest.Test
             retry=None,
             timeout=None,
         )
+        ti.xcom_push.assert_called_once_with(execution_date=None, key='run_id', value='123')
diff --git a/tests/providers/google/cloud/sensors/test_bigquery_dts.py b/tests/providers/google/cloud/sensors/test_bigquery_dts.py
index 92a116e..c8a0548 100644
--- a/tests/providers/google/cloud/sensors/test_bigquery_dts.py
+++ b/tests/providers/google/cloud/sensors/test_bigquery_dts.py
@@ -19,6 +19,8 @@
 import unittest
 from unittest import mock
 
+from google.cloud.bigquery_datatransfer_v1 import TransferState
+
 from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor
 
 TRANSFER_CONFIG_ID = "config_id"
@@ -27,20 +29,45 @@ PROJECT_ID = "project_id"
 
 
 class TestBigQueryDataTransferServiceTransferRunSensor(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook")
     @mock.patch(
-        "airflow.providers.google.cloud.sensors.bigquery_dts.MessageToDict",
-        return_value={"state": "success"},
+        "airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook",
+        **{'return_value.get_transfer_run.return_value.state': TransferState.FAILED},
+    )
+    def test_poke_returns_false(self, mock_hook):
+        op = BigQueryDataTransferServiceTransferRunSensor(
+            transfer_config_id=TRANSFER_CONFIG_ID,
+            run_id=RUN_ID,
+            task_id="id",
+            project_id=PROJECT_ID,
+            expected_statuses={"SUCCEEDED"},
+        )
+        result = op.poke({})
+
+        self.assertEqual(result, False)
+        mock_hook.return_value.get_transfer_run.assert_called_once_with(
+            transfer_config_id=TRANSFER_CONFIG_ID,
+            run_id=RUN_ID,
+            project_id=PROJECT_ID,
+            metadata=None,
+            retry=None,
+            timeout=None,
+        )
+
+    @mock.patch(
+        "airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook",
+        **{'return_value.get_transfer_run.return_value.state': TransferState.SUCCEEDED},
     )
-    def test_poke(self, mock_msg_to_dict, mock_hook):
+    def test_poke_returns_true(self, mock_hook):
         op = BigQueryDataTransferServiceTransferRunSensor(
             transfer_config_id=TRANSFER_CONFIG_ID,
             run_id=RUN_ID,
             task_id="id",
             project_id=PROJECT_ID,
-            expected_statuses={"success"},
+            expected_statuses={"SUCCEEDED"},
         )
-        op.poke(None)
+        result = op.poke({})
+
+        self.assertEqual(result, True)
         mock_hook.return_value.get_transfer_run.assert_called_once_with(
             transfer_config_id=TRANSFER_CONFIG_ID,
             run_id=RUN_ID,
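
A small sketch, for illustration only, of the expected-status normalization the
updated sensor performs: strings, ints, and TransferState members are all
mapped onto the google-cloud-bigquery-datatransfer>=3.0.0 TransferState enum
before comparison (the real logic lives in _normalize_state_list above).

    from google.cloud.bigquery_datatransfer_v1 import TransferState

    def normalize(state):
        # Accept a state given as a name, a raw integer value, or an enum member.
        if isinstance(state, str):
            return TransferState[state.upper()]
        if isinstance(state, TransferState):
            return state
        return TransferState(int(state))

    assert normalize("succeeded") is TransferState.SUCCEEDED
    assert normalize(int(TransferState.FAILED)) is TransferState.FAILED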


[airflow] 01/41: Disable health checks for ad-hoc containers (#14536)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a0c14a3c5c8653b5efef18194ee90d61e7255ba4
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Mon Mar 1 23:03:01 2021 +0100

    Disable health checks for ad-hoc containers (#14536)
    
    Co-authored-by: Kamil Breguła <ka...@apache.org>
    (cherry picked from commit 164ac4d08a62680012e80bf11ea3e2764e8c3af5)
---
 docs/apache-airflow/production-deployment.rst | 4 +++-
 docs/apache-airflow/start/airflow.sh          | 2 +-
 scripts/in_container/prod/entrypoint_prod.sh  | 7 +++++--
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/docs/apache-airflow/production-deployment.rst b/docs/apache-airflow/production-deployment.rst
index 8d7be95..34b12cb 100644
--- a/docs/apache-airflow/production-deployment.rst
+++ b/docs/apache-airflow/production-deployment.rst
@@ -810,6 +810,7 @@ available. This happens always when you use the default entrypoint.
 The script detects backend type depending on the URL schema and assigns default port numbers if not specified
 in the URL. Then it loops until the connection to the host/port specified can be established
 It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks
+To disable the check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
 
 Supported schemes:
 
@@ -894,7 +895,8 @@ commands are used the entrypoint will wait until the celery broker DB connection
 
 The script detects backend type depending on the URL schema and assigns default port numbers if not specified
 in the URL. Then it loops until connection to the host/port specified can be established
-It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks
+It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks.
+To disable the check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
 
 Supported schemes:
 
diff --git a/docs/apache-airflow/start/airflow.sh b/docs/apache-airflow/start/airflow.sh
index b563139..4aa1d71 100755
--- a/docs/apache-airflow/start/airflow.sh
+++ b/docs/apache-airflow/start/airflow.sh
@@ -25,4 +25,4 @@ PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 set -euo pipefail
 
 export COMPOSE_FILE="${PROJECT_DIR}/docker-compose.yaml"
-exec docker-compose run airflow-worker "${@}"
+exec docker-compose run --rm -e CONNECTION_CHECK_MAX_COUNT=0 airflow-worker "${@}"
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index 12214be..1dab579 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -239,7 +239,9 @@ readonly CONNECTION_CHECK_SLEEP_TIME
 
 create_system_user_if_missing
 set_pythonpath_for_root_user
-wait_for_airflow_db
+if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
+    wait_for_airflow_db
+fi
 
 if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] ; then
     upgrade_db
@@ -265,7 +267,8 @@ if [[ ${AIRFLOW_COMMAND} == "airflow" ]]; then
 fi
 
 # Note: the broker backend configuration concerns only a subset of Airflow components
-if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]]; then
+if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]] \
+    && [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
     wait_for_celery_backend "${@}"
 fi
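
To make the connection-check loop described in the documentation above more
concrete, here is a rough Python rendering; the real implementation is the bash
entrypoint in entrypoint_prod.sh, and the default values, host, and port below
are illustrative assumptions rather than the shipped configuration.

    import os
    import socket
    import sys
    import time

    CHECK_MAX_COUNT = int(os.environ.get("CONNECTION_CHECK_MAX_COUNT", "20"))
    CHECK_SLEEP_TIME = float(os.environ.get("CONNECTION_CHECK_SLEEP_TIME", "3"))

    def wait_for_backend(host: str, port: int) -> None:
        # CONNECTION_CHECK_MAX_COUNT=0 disables the check entirely.
        if CHECK_MAX_COUNT <= 0:
            return
        for attempt in range(1, CHECK_MAX_COUNT + 1):
            try:
                with socket.create_connection((host, port), timeout=2):
                    return
            except OSError:
                print(f"Waiting for {host}:{port}, attempt {attempt}/{CHECK_MAX_COUNT}")
                time.sleep(CHECK_SLEEP_TIME)
        sys.exit(f"Could not connect to {host}:{port}")

    if __name__ == "__main__":
        wait_for_backend("postgres", 5432)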